Update scripts to account for removal of the context override to bpy.ops
[blender-addons.git] / mesh_tissue / utils.py
1 # SPDX-License-Identifier: GPL-2.0-or-later
3 import bpy, bmesh
4 import threading
5 import numpy as np
6 import multiprocessing
7 from multiprocessing import Process, Pool
8 from mathutils import Vector, Matrix
9 from math import *
10 try: from .numba_functions import *
11 except: pass
13 from . import config
15 def use_numba_tess():
16 tissue_addon = bpy.context.preferences.addons[__package__]
17 if 'use_numba_tess' in tissue_addon.preferences.keys():
18 return tissue_addon.preferences['use_numba_tess']
19 else:
20 return True
22 def tissue_time(start_time, name, levels=0):
23 tissue_addon = bpy.context.preferences.addons[__package__]
24 end_time = time.time()
25 if 'print_stats' in tissue_addon.preferences.keys():
26 ps = tissue_addon.preferences['print_stats']
27 else:
28 ps = 1
29 if levels < ps:
30 if "Tissue: " in name: head = ""
31 else: head = " "
32 if start_time:
33 print('{}{}{} in {:.4f} sec'.format(head, "| "*levels, name, end_time - start_time))
34 else:
35 print('{}{}{}'.format(head, "| "*levels, name))
36 return end_time
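# Typical usage (illustrative sketch only): chain the returned end time to
# measure nested stages; "levels" controls the indentation and whether the
# line is printed at all, according to the "print_stats" preference.
#   start = time.time()
#   ...heavy computation...
#   start = tissue_time(start, "Tessellate", levels=1)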
39 # ------------------------------------------------------------------
40 # MATH
41 # ------------------------------------------------------------------
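# _np_broadcast repeats singleton axes so that all the input arrays end up
# sharing the same shape (a simplified, explicit variant of numpy broadcasting).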
43 def _np_broadcast(arrays):
44 shapes = [arr.shape for arr in arrays]
45 for i in range(len(shapes[0])):
46 ish = [sh[i] for sh in shapes]
47 max_len = max(ish)
48 for j in range(len(arrays)):
49 leng = ish[j]
50 if leng == 1: arrays[j] = np.repeat(arrays[j], max_len, axis=i)
arrays = [arr.flatten() for arr in arrays]
54 return arrays
56 def lerp(a, b, t):
57 return a + (b - a) * t
59 def _lerp2(v1, v2, v3, v4, v):
60 v12 = v1.lerp(v2,v.x) # + (v2 - v1) * v.x
61 v34 = v3.lerp(v4,v.x) # + (v4 - v3) * v.x
62 return v12.lerp(v34, v.y)# + (v34 - v12) * v.y
64 def lerp2(v1, v2, v3, v4, v):
65 v12 = v1 + (v2 - v1) * v.x
66 v34 = v3 + (v4 - v3) * v.x
67 v = v12 + (v34 - v12) * v.y
68 return v
70 def lerp3(v1, v2, v3, v4, v):
71 loc = lerp2(v1.co, v2.co, v3.co, v4.co, v)
72 nor = lerp2(v1.normal, v2.normal, v3.normal, v4.normal, v)
73 nor.normalize()
74 return loc + nor * v.z
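# Illustrative sketch: lerp2() bilinearly blends the four corners of a quad
# according to v.x and v.y, while lerp3() additionally offsets the result along
# the interpolated (normalized) normal by v.z.
#   corners = [Vector((0,0,0)), Vector((1,0,0)), Vector((0,1,0)), Vector((1,1,0))]
#   lerp2(*corners, Vector((0.5, 0.5, 0)))   # -> Vector((0.5, 0.5, 0))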
76 import sys
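# np_lerp2 dispatches to the Numba kernels from numba_functions when the module
# is available (and enabled in the add-on preferences); otherwise it falls back
# to a plain numpy bilinear interpolation.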
77 def np_lerp2(v00, v10, v01, v11, vx, vy, mode=''):
78 if 'numba' in sys.modules and use_numba_tess():
79 if mode == 'verts':
80 co2 = numba_interp_points(v00, v10, v01, v11, vx, vy)
81 elif mode == 'shapekeys':
82 co2 = numba_interp_points_sk(v00, v10, v01, v11, vx, vy)
83 else:
84 co2 = numba_lerp2(v00, v10, v01, v11, vx, vy)
85 else:
86 co0 = v00 + (v10 - v00) * vx
87 co1 = v01 + (v11 - v01) * vx
88 co2 = co0 + (co1 - co0) * vy
89 return co2
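# calc_thickness displaces the interpolated points co2 along their normals n2
# by vz, optionally scaled by the faces area (a) and the vertex-group weight.
# The 4-dimensional branch handles shape keys, with axis 0 = patches and
# axis 1 = shape keys.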
91 def calc_thickness(co2,n2,vz,a,weight):
92 if 'numba' in sys.modules and use_numba_tess():
93 if len(co2.shape) == 3:
94 if type(a) != np.ndarray:
95 a = np.ones(len(co2)).reshape((-1,1,1))
96 if type(weight) != np.ndarray:
97 weight = np.ones(len(co2)).reshape((-1,1,1))
98 co3 = numba_calc_thickness_area_weight(co2,n2,vz,a,weight)
99 elif len(co2.shape) == 4:
100 n_patches = co2.shape[0]
101 n_sk = co2.shape[1]
102 n_verts = co2.shape[2]
103 if type(a) != np.ndarray:
104 a = np.ones(n_patches).reshape((n_patches,1,1,1))
105 if type(weight) != np.ndarray:
106 weight = np.ones(n_patches).reshape((n_patches,1,1,1))
107 na = a.shape[1]-1
108 nw = weight.shape[1]-1
109 co3 = np.empty((n_sk,n_patches,n_verts,3))
110 for i in range(n_sk):
111 co3[i] = numba_calc_thickness_area_weight(co2[:,i],n2[:,i],vz[:,i],a[:,min(i,na)],weight[:,min(i,nw)])
112 co3 = co3.swapaxes(0,1)
113 else:
114 use_area = type(a) == np.ndarray
115 use_weight = type(weight) == np.ndarray
116 if use_area:
117 if use_weight:
118 co3 = co2 + n2 * vz * a * weight
119 else:
120 co3 = co2 + n2 * vz * a
121 else:
122 if use_weight:
123 co3 = co2 + n2 * vz * weight
124 else:
125 co3 = co2 + n2 * vz
126 return co3
128 def combine_and_flatten(arrays):
129 if 'numba' in sys.modules:
130 new_list = numba_combine_and_flatten(arrays)
131 else:
132 new_list = np.concatenate(arrays, axis=0)
133 new_list = new_list.flatten().tolist()
134 return new_list
136 def np_interp2(grid, vx, vy):
137 grid_shape = grid.shape[-2:]
138 levels = len(grid.shape)-2
139 nu = grid_shape[0]
140 nv = grid_shape[1]
141 u = np.arange(nu)/(nu-1)
142 v = np.arange(nv)/(nv-1)
143 u_shape = [1]*levels + [nu]
144 v_shape = [1]*levels + [nv]
# minimal completion (assumption): sample the grid bilinearly along its last
# two axes, treating vx and vy as normalized coordinates in [0, 1]
x = np.clip(vx, 0, 1) * (nu - 1)
y = np.clip(vy, 0, 1) * (nv - 1)
i0 = np.floor(x).astype(int)
j0 = np.floor(y).astype(int)
i1 = np.minimum(i0 + 1, nu - 1)
j1 = np.minimum(j0 + 1, nv - 1)
fx, fy = x - i0, y - j0
co0 = grid[..., i0, j0] * (1 - fx) + grid[..., i1, j0] * fx
co1 = grid[..., i0, j1] * (1 - fx) + grid[..., i1, j1] * fx
co2 = co0 * (1 - fy) + co1 * fy
149 return co2
151 def flatten_vector(vec, x, y):
"""
Find planar vector according to two axes.
:arg vec: Input vector.
:type vec: :class:`mathutils.Vector`
:arg x: First axis.
:type x: :class:`mathutils.Vector`
:arg y: Second axis.
:type y: :class:`mathutils.Vector`
:return: Projected 2D Vector.
:rtype: :class:`mathutils.Vector`
"""
163 vx = vec.project(x)
164 vy = vec.project(y)
165 mult = 1 if vx.dot(x) > 0 else -1
166 vx = mult*vx.length
167 mult = 1 if vy.dot(y) > 0 else -1
168 vy = mult*vy.length
169 return Vector((vx, vy))
171 def vector_rotation(vec):
"""
Find vector rotation according to X axis.
:arg vec: Input vector.
:type vec: :class:`mathutils.Vector`
:return: Angle in radians.
:rtype: float
"""
179 v0 = Vector((1,0))
180 ang = Vector.angle_signed(vec, v0)
181 if ang < 0: ang = 2*pi + ang
182 return ang
184 # ------------------------------------------------------------------
185 # SCENE
186 # ------------------------------------------------------------------
188 def set_animatable_fix_handler(self, context):
"""
Prevent Blender Crashes with handlers
"""
192 old_handlers = []
193 blender_handlers = bpy.app.handlers.render_init
194 for h in blender_handlers:
195 if "turn_off_animatable" in str(h):
196 old_handlers.append(h)
197 for h in old_handlers: blender_handlers.remove(h)
198 blender_handlers.append(turn_off_animatable)
199 return
201 def turn_off_animatable(scene):
"""
Prevent Blender Crashes with handlers
"""
205 for o in [o for o in bpy.data.objects if o.type == 'MESH']:
206 o.tissue_tessellate.bool_run = False
207 #if not o.reaction_diffusion_settings.bool_cache:
208 # o.reaction_diffusion_settings.run = False
209 #except: pass
210 return
212 # ------------------------------------------------------------------
213 # OBJECTS
214 # ------------------------------------------------------------------
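# convert_object_to_mesh returns a new mesh object evaluated from any object
# type, optionally applying its modifiers, and links it to the active
# collection; with preserve_status=False the new object becomes the active one.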
216 def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True):
217 try: ob.name
218 except: return None
219 if ob.type != 'MESH':
220 if not apply_modifiers:
221 mod_visibility = [m.show_viewport for m in ob.modifiers]
222 for m in ob.modifiers: m.show_viewport = False
223 #ob.modifiers.update()
224 #dg = bpy.context.evaluated_depsgraph_get()
225 #ob_eval = ob.evaluated_get(dg)
226 #me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)
227 me = simple_to_mesh(ob)
228 new_ob = bpy.data.objects.new(ob.data.name, me)
229 new_ob.location, new_ob.matrix_world = ob.location, ob.matrix_world
230 if not apply_modifiers:
231 for m,vis in zip(ob.modifiers,mod_visibility): m.show_viewport = vis
232 else:
233 if apply_modifiers:
234 new_ob = ob.copy()
235 new_me = simple_to_mesh(ob)
236 new_ob.modifiers.clear()
237 new_ob.data = new_me
238 else:
239 new_ob = ob.copy()
240 new_ob.data = ob.data.copy()
241 new_ob.modifiers.clear()
242 bpy.context.collection.objects.link(new_ob)
243 if preserve_status:
244 new_ob.select_set(False)
245 else:
246 for o in bpy.context.view_layer.objects: o.select_set(False)
247 new_ob.select_set(True)
248 bpy.context.view_layer.objects.active = new_ob
249 return new_ob
251 def simple_to_mesh(ob, depsgraph=None):
"""
Convert object to mesh applying Modifiers and Shape Keys
"""
255 #global evaluatedDepsgraph
256 if depsgraph == None:
257 if config.evaluatedDepsgraph == None:
258 dg = bpy.context.evaluated_depsgraph_get()
259 else: dg = config.evaluatedDepsgraph
260 else:
261 dg = depsgraph
262 ob_eval = ob.evaluated_get(dg)
263 me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)
264 return me
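# Illustrative usage (sketch): evaluate the active object once, reusing an
# explicit depsgraph to avoid repeated evaluations.
#   dg = bpy.context.evaluated_depsgraph_get()
#   me = simple_to_mesh(bpy.context.object, depsgraph=dg)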
266 def _join_objects(context, objects, link_to_scene=True, make_active=True):
267 C = context
268 bm = bmesh.new()
270 materials = {}
271 faces_materials = []
272 if config.evaluatedDepsgraph == None:
273 dg = C.evaluated_depsgraph_get()
274 else: dg = config.evaluatedDepsgraph
276 for o in objects:
277 bm.from_object(o, dg)
278 # add object's material to the dictionary
279 for m in o.data.materials:
280 if m not in materials: materials[m] = len(materials)
281 for f in o.data.polygons:
282 index = f.material_index
283 mat = o.material_slots[index].material
284 new_index = materials[mat]
285 faces_materials.append(new_index)
286 bm.verts.ensure_lookup_table()
287 bm.edges.ensure_lookup_table()
288 bm.faces.ensure_lookup_table()
289 # assign new indexes
290 for index, f in zip(faces_materials, bm.faces): f.material_index = index
291 # create object
292 me = bpy.data.meshes.new('joined')
293 bm.to_mesh(me)
294 me.update()
295 ob = bpy.data.objects.new('joined', me)
296 if link_to_scene: C.collection.objects.link(ob)
297 # make active
298 if make_active:
299 for o in C.view_layer.objects: o.select_set(False)
300 ob.select_set(True)
301 C.view_layer.objects.active = ob
302 # add materials
303 for m in materials.keys(): ob.data.materials.append(m)
305 return ob
307 def join_objects(context, objects):
308 generated_data = [o.data for o in objects]
309 context.view_layer.update()
310 for o in context.view_layer.objects:
311 o.select_set(o in objects)
312 bpy.ops.object.join()
313 new_ob = context.view_layer.objects.active
314 new_ob.select_set(True)
315 for me in generated_data:
316 if me != new_ob.data:
317 bpy.data.meshes.remove(me)
318 return new_ob
def join_objects(objects):
    # passing a context-override dictionary to bpy.ops was removed,
    # so use a temporary context override instead
    new_ob = objects[0]
    with bpy.context.temp_override(active_object=new_ob,
            selected_objects=objects,
            selected_editable_objects=objects):
        bpy.ops.object.join()
    return new_ob
328 def repeat_mesh(me, n):
"""
Return Mesh data adding and applying an array without offset (Slower)
"""
332 bm = bmesh.new()
333 for i in range(n): bm.from_mesh(me)
334 new_me = me.copy()
335 bm.to_mesh(new_me)
336 bm.free()
337 return new_me
339 def array_mesh(ob, n):
"""
Return Mesh data adding and applying an array without offset
"""
343 arr = ob.modifiers.new('Repeat','ARRAY')
344 arr.relative_offset_displace[0] = 0
345 arr.count = n
346 # with bpy.context.temp_override(active_object=ob):
347 # bpy.ops.object.modifier_apply(modifier='Repeat')
348 # me = ob.data
349 ob.modifiers.update()
351 dg = bpy.context.evaluated_depsgraph_get()
352 me = simple_to_mesh(ob, depsgraph=dg)
353 ob.modifiers.remove(arr)
354 return me
356 def array_mesh_object(ob, n):
"""
Return Mesh data adding and applying an array without offset
"""
360 arr = ob.modifiers.new('Repeat','ARRAY')
361 arr.relative_offset_displace[0] = 0
362 arr.count = n
363 ob.modifiers.update()
# apply the modifier through a temporary context override
# (the override-dict argument to bpy.ops was removed)
with bpy.context.temp_override(object=ob, active_object=ob):
    bpy.ops.object.modifier_apply(modifier=arr.name)
368 return ob
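# get_mesh_before_subs temporarily hides the Subsurf/Multires modifier (and the
# modifiers that follow it), evaluates the mesh, restores the visibility flags
# and returns the evaluated mesh together with the subdivision level found
# (0 when a non-compatible modifier is found).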
371 def get_mesh_before_subs(ob):
372 not_allowed = ('FLUID_SIMULATION', 'ARRAY', 'BEVEL', 'BOOLEAN', 'BUILD',
373 'DECIMATE', 'EDGE_SPLIT', 'MASK', 'MIRROR', 'REMESH',
374 'SCREW', 'SOLIDIFY', 'TRIANGULATE', 'WIREFRAME', 'SKIN',
375 'EXPLODE', 'PARTICLE_INSTANCE', 'PARTICLE_SYSTEM', 'SMOKE')
376 subs = 0
377 hide_mods = []
378 mods_visibility = []
379 for m in ob.modifiers:
380 hide_mods.append(m)
381 mods_visibility.append(m.show_viewport)
382 if m.type in ('SUBSURF','MULTIRES'):
383 hide_mods = [m]
384 subs = m.levels
385 elif m.type in not_allowed:
386 subs = 0
387 hide_mods = []
388 mods_visibility = []
389 for m in hide_mods: m.show_viewport = False
390 me = simple_to_mesh(ob)
391 for m, vis in zip(hide_mods,mods_visibility): m.show_viewport = vis
392 return me, subs
394 # ------------------------------------------------------------------
395 # MESH FUNCTIONS
396 # ------------------------------------------------------------------
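# calc_verts_area returns, for every vertex, the average area of its adjacent
# faces; note that it reads four vertex indices per polygon, so it assumes a
# quad-only mesh (calc_verts_area_bmesh below has no such restriction).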
398 def calc_verts_area(me):
399 n_verts = len(me.vertices)
400 n_faces = len(me.polygons)
401 vareas = np.zeros(n_verts)
402 vcount = np.zeros(n_verts)
403 parea = [0]*n_faces
404 pverts = [0]*n_faces*4
405 me.polygons.foreach_get('area', parea)
406 me.polygons.foreach_get('vertices', pverts)
407 parea = np.array(parea)
408 pverts = np.array(pverts).reshape((n_faces, 4))
409 for a, verts in zip(parea,pverts):
410 vareas[verts] += a
411 vcount[verts] += 1
412 return vareas / vcount
414 def calc_verts_area_bmesh(me):
415 bm = bmesh.new()
416 bm.from_mesh(me)
417 bm.verts.ensure_lookup_table()
418 verts_area = np.zeros(len(me.vertices))
419 for v in bm.verts:
420 area = 0
421 faces = v.link_faces
422 for f in faces:
423 area += f.calc_area()
424 verts_area[v.index] = area if area == 0 else area/len(faces)
425 bm.free()
426 return verts_area
428 import time
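# The get_patches* functions build, for each quad face of the low resolution
# mesh, an (n x n) grid of vertex indices of the subdivided mesh, with
# n = 2**subs + 1: corners come from the original face, the borders from the
# subdivided edges (checking their direction) and the interior from the new
# inner vertices. They return the index grid together with the mask of the
# faces actually used.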
430 def get_patches____(me_low, me_high, sides, subs, bool_selection, bool_material_id, material_id):
431 nv = len(me_low.vertices) # number of vertices
432 ne = len(me_low.edges) # number of edges
433 nf = len(me_low.polygons) # number of polygons
434 n = 2**subs + 1 # number of vertices along each patch edge
435 nev = ne * n # number of vertices along the subdivided edges
nevi = nev - 2*ne # internal vertices along subdivided edges
438 n0 = 2**(subs-1) - 1
440 # filtered polygonal faces
441 poly_sides = np.array([len(p.vertices) for p in me_low.polygons])
442 mask = poly_sides == sides
443 if bool_material_id:
444 mask_material = [1]*nf
445 me_low.polygons.foreach_get('material_index',mask_material)
446 mask_material = np.array(mask_material) == material_id
447 mask = np.logical_and(mask,mask_material)
448 if bool_selection:
449 mask_selection = [True]*nf
450 me_low.polygons.foreach_get('select',mask_selection)
451 mask_selection = np.array(mask_selection)
452 mask = np.logical_and(mask,mask_selection)
453 polys = np.array(me_low.polygons)[mask]
454 mult = n0**2 + n0
455 ps = poly_sides * mult + 1
456 ps = np.insert(ps,0,nv + nevi, axis=0)[:-1]
457 ips = ps.cumsum()[mask] # incremental polygon sides
458 nf = len(polys)
# when subdivided, quad faces follow a different pattern
461 if sides == 4:
462 n_patches = nf
463 else:
464 n_patches = nf*sides
466 if sides == 4:
467 patches = np.zeros((nf,n,n),dtype='int')
468 verts = [[vv for vv in p.vertices] for p in polys if len(p.vertices) == sides]
469 verts = np.array(verts).reshape((-1,sides))
471 # filling corners
473 patches[:,0,0] = verts[:,0]
474 patches[:,n-1,0] = verts[:,1]
475 patches[:,n-1,n-1] = verts[:,2]
476 patches[:,0,n-1] = verts[:,3]
478 if subs != 0:
479 shift_verts = np.roll(verts, -1, axis=1)[:,:,None]
480 edge_keys = np.concatenate((shift_verts, verts[:,:,None]), axis=2)
481 edge_keys.sort()
483 edge_verts = np.array(me_low.edge_keys) # edges keys
484 edges_index = np.zeros((ne,ne),dtype='int')
485 edges_index[edge_verts[:,0],edge_verts[:,1]] = np.arange(ne)
487 evi = np.arange(nevi) + nv
488 evi = evi.reshape(ne,n-2) # edges inner verts
489 straight = np.arange(n-2)+1
490 inverted = np.flip(straight)
491 inners = np.array([[j*(n-2)+i for j in range(n-2)] for i in range(n-2)])
493 ek1 = np.array(me_high.edge_keys) # edges keys
494 ids0 = np.arange(ne)*(n-1) # edge keys highres
495 keys0 = ek1[ids0] # first inner edge
496 keys1 = ek1[ids0 + n-2] # last inner edge
497 keys = np.concatenate((keys0,keys1))
498 pick_verts = np.array((inverted,straight))
500 patch_index = np.arange(nf)[:,None,None]
502 # edge 0
503 e0 = edge_keys[:,0] # get edge key (faces, 2)
504 edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
505 edge_verts = evi[edge_id] # indexes of inner vertices
506 test = np.concatenate((verts[:,0,None], edge_verts[:,0,None]),axis=1)
507 dir = (test[:,None] == keys).all(2).any(1).astype('int8')
508 #dir = np.full(verts[:,0].shape, 0, dtype='int8')
509 ids = pick_verts[dir][:,None,:] # indexes order along the side
510 patches[patch_index,ids,0] = edge_verts[:,None,:] # assign indexes
511 #patches[:,msk] = inverted # np.flip(patches[msk])
513 # edge 1
514 e0 = edge_keys[:,1] # get edge key (faces, 2)
515 edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
516 edge_verts = evi[edge_id] # indexes of inner vertices
517 test = np.concatenate((verts[:,1,None], edge_verts[:,0,None]),axis=1)
518 dir = (test[:,None] == keys).all(2).any(1).astype('int8')
519 ids = pick_verts[dir][:,:,None] # indexes order along the side
520 patches[patch_index,n-1,ids] = edge_verts[:,:,None] # assign indexes
522 # edge 2
523 e0 = edge_keys[:,2] # get edge key (faces, 2)
524 edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
525 edge_verts = evi[edge_id] # indexes of inner vertices
526 test = np.concatenate((verts[:,3,None], edge_verts[:,0,None]),axis=1)
527 dir = (test[:,None] == keys).all(2).any(1).astype('int8')
528 ids = pick_verts[dir][:,None,:] # indexes order along the side
529 patches[patch_index,ids,n-1] = edge_verts[:,None,:] # assign indexes
531 # edge 3
532 e0 = edge_keys[:,3] # get edge key (faces, 2)
533 edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
534 edge_verts = evi[edge_id] # indexes of inner vertices
535 test = np.concatenate((verts[:,0,None], edge_verts[:,0,None]),axis=1)
536 dir = (test[:,None] == keys).all(2).any(1).astype('int8')
537 ids = pick_verts[dir][:,:,None] # indexes order along the side
538 patches[patch_index,0,ids] = edge_verts[:,:,None] # assign indexes
540 # fill inners
541 patches[:,1:-1,1:-1] = inners[None,:,:] + ips[:,None,None]
543 #end_time = time.time()
544 #print('Tissue: Got Patches in {:.4f} sec'.format(end_time-start_time))
546 return patches, mask
548 def tessellate_prepare_component(ob1, props):
549 mode = props['mode']
550 bounds_x = props['bounds_x']
551 bounds_y = props['bounds_y']
552 scale_mode = props['scale_mode']
553 normals_mode = props['normals_mode']
554 zscale = props['zscale']
555 offset = props['offset']
556 use_origin_offset = props['use_origin_offset']
557 bool_shapekeys = props['bool_shapekeys']
559 thres = 0.005
561 me1 = ob1.data
563 # Component statistics
564 n_verts = len(me1.vertices)
566 # Component bounding box
567 min_c = Vector((0, 0, 0))
568 max_c = Vector((0, 0, 0))
569 first = True
570 for v in me1.vertices:
571 vert = v.co
572 if vert[0] < min_c[0] or first:
573 min_c[0] = vert[0]
574 if vert[1] < min_c[1] or first:
575 min_c[1] = vert[1]
576 if vert[2] < min_c[2] or first:
577 min_c[2] = vert[2]
578 if vert[0] > max_c[0] or first:
579 max_c[0] = vert[0]
580 if vert[1] > max_c[1] or first:
581 max_c[1] = vert[1]
582 if vert[2] > max_c[2] or first:
583 max_c[2] = vert[2]
584 first = False
585 bb = max_c - min_c
587 # adaptive XY
588 verts1 = []
589 for v in me1.vertices:
590 if mode == 'BOUNDS':
591 vert = v.co - min_c # (ob1.matrix_world * v.co) - min_c
592 if use_origin_offset: vert[2] = v.co[2]
593 vert[0] = vert[0] / bb[0] if bb[0] != 0 else 0.5
594 vert[1] = vert[1] / bb[1] if bb[1] != 0 else 0.5
595 if scale_mode == 'CONSTANT' or normals_mode in ('OBJECT', 'SHAPEKEYS'):
596 if not use_origin_offset:
597 vert[2] = vert[2] / bb[2] if bb[2] != 0 else 0
598 vert[2] = vert[2] - 0.5 + offset * 0.5
599 else:
600 if not use_origin_offset:
601 vert[2] = vert[2] + (-0.5 + offset * 0.5) * bb[2]
602 vert[2] *= zscale
603 elif mode == 'LOCAL':
604 vert = v.co.xyz
605 vert[2] *= zscale
606 #vert[2] = (vert[2] - min_c[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
607 elif mode == 'GLOBAL':
608 vert = ob1.matrix_world @ v.co
609 vert[2] *= zscale
610 try:
611 for sk in me1.shape_keys.key_blocks:
612 sk.data[v.index].co = ob1.matrix_world @ sk.data[v.index].co
613 except: pass
614 v.co = vert
616 # ShapeKeys
617 if bool_shapekeys and ob1.data.shape_keys:
618 for sk in ob1.data.shape_keys.key_blocks:
619 source = sk.data
620 _sk_uv_quads = [0]*len(verts1)
621 _sk_uv = [0]*len(verts1)
622 for i, sk_v in enumerate(source):
623 if mode == 'BOUNDS':
624 sk_vert = sk_v.co - min_c
625 if use_origin_offset: sk_vert[2] = sk_v.co[2]
626 sk_vert[0] = (sk_vert[0] / bb[0] if bb[0] != 0 else 0.5)
627 sk_vert[1] = (sk_vert[1] / bb[1] if bb[1] != 0 else 0.5)
628 if scale_mode == 'CONSTANT' or normals_mode in ('OBJECT', 'SHAPEKEYS'):
629 if not use_origin_offset:
630 sk_vert[2] = (sk_vert[2] / bb[2] if bb[2] != 0 else sk_vert[2])
631 sk_vert[2] = sk_vert[2] - 0.5 + offset * 0.5
632 else:
633 if not use_origin_offset:
634 sk_vert[2] = sk_vert[2] + (- 0.5 + offset * 0.5) * bb[2]
635 sk_vert[2] *= zscale
636 elif mode == 'LOCAL':
637 sk_vert = sk_v.co
638 sk_vert[2] *= zscale
639 elif mode == 'GLOBAL':
640 sk_vert = sk_v.co
641 sk_vert[2] *= zscale
642 sk_v.co = sk_vert
644 if mode != 'BOUNDS' and (bounds_x != 'EXTEND' or bounds_y != 'EXTEND'):
645 ob1.active_shape_key_index = 0
646 bm = bmesh.new()
647 bm.from_mesh(me1)
648 # Bound X
649 planes_co = []
650 planes_no = []
651 bounds = []
652 if bounds_x != 'EXTEND':
653 planes_co += [(0,0,0), (1,0,0)]
654 planes_no += [(-1,0,0), (1,0,0)]
655 bounds += [bounds_x, bounds_x]
656 if bounds_y != 'EXTEND':
657 planes_co += [(0,0,0), (0,1,0)]
658 planes_no += [(0,-1,0), (0,1,0)]
659 bounds += [bounds_y, bounds_y]
660 for co, norm, bound in zip(planes_co, planes_no, bounds):
661 count = 0
662 while True:
663 moved = 0
664 original_edges = list(bm.edges)
665 geom = list(bm.verts) + list(bm.edges) + list(bm.faces)
bisect = bmesh.ops.bisect_plane(bm, geom=geom, dist=0,
    plane_co=co, plane_no=norm, use_snap_center=False,
    clear_outer=bound=='CLIP', clear_inner=False
    )
670 geom = bisect['geom']
671 cut_edges = [g for g in bisect['geom_cut'] if type(g)==bmesh.types.BMEdge]
672 cut_verts = [g for g in bisect['geom_cut'] if type(g)==bmesh.types.BMVert]
674 if bound!='CLIP':
675 for e in cut_edges:
676 seam = True
677 # Prevent glitches
678 for e1 in original_edges:
679 match_00 = (e.verts[0].co-e1.verts[0].co).length < thres
680 match_11 = (e.verts[1].co-e1.verts[1].co).length < thres
681 match_01 = (e.verts[0].co-e1.verts[1].co).length < thres
682 match_10 = (e.verts[1].co-e1.verts[0].co).length < thres
683 if (match_00 and match_11) or (match_01 and match_10):
684 seam = False
685 break
686 e.seam = seam
688 if bound == 'CYCLIC':
689 geom_verts = []
690 if norm == (-1,0,0):
691 geom_verts = [v for v in bm.verts if v.co.x < 0]
692 if norm == (1,0,0):
693 geom_verts = [v for v in bm.verts if v.co.x > 1]
694 if norm == (0,-1,0):
695 geom_verts = [v for v in bm.verts if v.co.y < 0]
696 if norm == (0,1,0):
697 geom_verts = [v for v in bm.verts if v.co.y > 1]
698 if len(geom_verts) > 0:
geom = bmesh.ops.region_extend(bm, geom=geom_verts,
    use_contract=False, use_faces=False, use_face_step=True
    )
702 geom = bmesh.ops.split(bm, geom=geom['geom'], use_only_faces=False)
703 vec = Vector(norm)
704 move_verts = [g for g in geom['geom'] if type(g)==bmesh.types.BMVert]
705 bmesh.ops.translate(bm, vec=-vec, verts=move_verts)
706 for key in bm.verts.layers.shape.keys():
707 sk = bm.verts.layers.shape.get(key)
708 for v in move_verts:
709 v[sk] -= vec
710 moved += len(move_verts)
711 count += 1
712 if moved == 0 or count > 1000: break
713 bm.to_mesh(me1)
715 com_area = bb[0]*bb[1]
716 return ob1, com_area
718 def get_quads(me, bool_selection):
719 nf = len(me.polygons)
721 verts = []
722 materials = []
723 mask = []
724 for poly in me.polygons:
725 p = list(poly.vertices)
726 sides = len(p)
727 if sides == 3:
728 verts.append([[p[0], p[-1]], [p[1], p[2]]])
729 materials.append(poly.material_index)
730 mask.append(poly.select if bool_selection else True)
731 elif sides == 4:
732 verts.append([[p[0], p[3]], [p[1], p[2]]])
733 materials.append(poly.material_index)
734 mask.append(poly.select if bool_selection else True)
735 else:
736 while True:
737 new_poly = [[p[-2], p.pop(-1)], [p[1], p.pop(0)]]
738 verts.append(new_poly)
739 materials.append(poly.material_index)
740 mask.append(poly.select if bool_selection else True)
741 if len(p) < 3: break
742 mask = np.array(mask)
743 materials = np.array(materials)[mask]
744 verts = np.array(verts)[mask]
745 return verts, mask, materials
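# get_patches is the streamlined variant of get_patches____ above: it returns
# the same index grid and face mask, plus the material index of every face kept
# by the mask.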
747 def get_patches(me_low, me_high, sides, subs, bool_selection): #, bool_material_id, material_id):
748 nv = len(me_low.vertices) # number of vertices
749 ne = len(me_low.edges) # number of edges
750 nf = len(me_low.polygons) # number of polygons
751 n = 2**subs + 1
752 nev = ne * n # number of vertices along the subdivided edges
nevi = nev - 2*ne # internal vertices along subdivided edges
755 n0 = 2**(subs-1) - 1
757 # filtered polygonal faces
758 poly_sides = [0]*nf
759 me_low.polygons.foreach_get('loop_total',poly_sides)
760 poly_sides = np.array(poly_sides)
761 mask = poly_sides == sides
763 if bool_selection:
764 mask_selection = [True]*nf
765 me_low.polygons.foreach_get('select',mask_selection)
766 mask = np.array(mask_selection)
768 materials = [1]*nf
769 me_low.polygons.foreach_get('material_index',materials)
770 materials = np.array(materials)[mask]
772 polys = np.array(me_low.polygons)[mask]
773 mult = n0**2 + n0
774 ps = poly_sides * mult + 1
775 ps = np.insert(ps,0,nv + nevi, axis=0)[:-1]
776 ips = ps.cumsum()[mask] # incremental polygon sides
777 nf = len(polys)
# when subdivided, quad faces follow a different pattern
780 if sides == 4:
781 n_patches = nf
782 else:
783 n_patches = nf*sides
785 if sides == 4:
786 patches = np.empty((nf,n,n),dtype='int')
787 verts = [list(p.vertices) for p in polys if len(p.vertices) == sides]
788 verts = np.array(verts).reshape((-1,sides))
790 # filling corners
792 patches[:,0,0] = verts[:,0]
793 patches[:,n-1,0] = verts[:,1]
794 patches[:,n-1,n-1] = verts[:,2]
795 patches[:,0,n-1] = verts[:,3]
797 if subs != 0:
798 shift_verts = np.roll(verts, -1, axis=1)[:,:,None]
799 edge_keys = np.concatenate((shift_verts, verts[:,:,None]), axis=2)
800 edge_keys.sort()
802 edge_verts = np.array(me_low.edge_keys) # edges keys
803 edges_index = np.empty((ne,ne),dtype='int')
804 edges_index[edge_verts[:,0],edge_verts[:,1]] = np.arange(ne)
806 evi = np.arange(nevi) + nv
807 evi = evi.reshape(ne,n-2) # edges inner verts
808 straight = np.arange(n-2)+1
809 inverted = np.flip(straight)
810 inners = np.array([[j*(n-2)+i for j in range(n-2)] for i in range(n-2)])
812 ek1 = me_high.edge_keys # edges keys
813 ek1 = np.array(ek1) # edge keys highres
814 keys0 = ek1[np.arange(ne)*(n-1)] # first inner edge
815 keys1 = ek1[np.arange(ne)*(n-1)+n-2] # last inner edge
816 edges_dir = np.zeros((nev,nev),dtype='bool') # Better memory usage
817 #edges_dir = np.zeros((nev,nev),dtype='int8') ### Memory usage not efficient, dictionary as alternative?
818 edges_dir[keys0[:,0], keys0[:,1]] = 1
819 edges_dir[keys1[:,0], keys1[:,1]] = 1
820 pick_verts = np.array((inverted,straight))
822 patch_index = np.arange(nf)[:,None,None]
824 # edge 0
825 e0 = edge_keys[:,0] # get edge key (faces, 2)
826 edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
827 edge_verts = evi[edge_id] # indexes of inner vertices
828 dir = edges_dir[verts[:,0], edge_verts[:,0]] # check correct direction
829 ids = pick_verts[dir.astype('int8')][:,None,:] # indexes order along the side
830 patches[patch_index,ids,0] = edge_verts[:,None,:] # assign indexes
832 # edge 1
833 e0 = edge_keys[:,1] # get edge key (faces, 2)
834 edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
835 edge_verts = evi[edge_id] # indexes of inner vertices
836 dir = edges_dir[verts[:,1], edge_verts[:,0]] # check correct direction
837 ids = pick_verts[dir.astype('int8')][:,:,None] # indexes order along the side
838 patches[patch_index,n-1,ids] = edge_verts[:,:,None] # assign indexes
840 # edge 2
841 e0 = edge_keys[:,2] # get edge key (faces, 2)
842 edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
843 edge_verts = evi[edge_id] # indexes of inner vertices
844 dir = edges_dir[verts[:,3], edge_verts[:,0]] # check correct direction
845 ids = pick_verts[dir.astype('int8')][:,None,:] # indexes order along the side
846 patches[patch_index,ids,n-1] = edge_verts[:,None,:] # assign indexes
848 # edge 3
849 e0 = edge_keys[:,3] # get edge key (faces, 2)
850 edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
851 edge_verts = evi[edge_id] # indexes of inner vertices
852 dir = edges_dir[verts[:,0], edge_verts[:,0]] # check correct direction
853 ids = pick_verts[dir.astype('int8')][:,:,None] # indexes order along the side
854 patches[patch_index,0,ids] = edge_verts[:,:,None] # assign indexes
856 # fill inners
857 patches[:,1:-1,1:-1] = inners[None,:,:] + ips[:,None,None]
859 return patches, mask, materials
861 def get_vertices_numpy(mesh):
"""
Create a numpy array with the vertices of a given mesh
"""
865 n_verts = len(mesh.vertices)
866 verts = [0]*n_verts*3
867 mesh.vertices.foreach_get('co', verts)
868 verts = np.array(verts).reshape((n_verts,3))
869 return verts
871 def get_vertices_and_normals_numpy(mesh):
"""
Create two numpy arrays with the vertices and the normals of a given mesh
"""
875 n_verts = len(mesh.vertices)
876 verts = [0]*n_verts*3
877 normals = [0]*n_verts*3
878 mesh.vertices.foreach_get('co', verts)
879 mesh.vertices.foreach_get('normal', normals)
880 verts = np.array(verts).reshape((n_verts,3))
881 normals = np.array(normals).reshape((n_verts,3))
882 return verts, normals
884 def get_normals_numpy(mesh):
"""
Create a numpy array with the normals of a given mesh
"""
888 n_verts = len(mesh.vertices)
889 normals = [0]*n_verts*3
890 mesh.vertices.foreach_get('normal', normals)
891 normals = np.array(normals).reshape((n_verts,3))
892 return normals
894 def get_edges_numpy(mesh):
"""
Create a numpy array with the edges of a given mesh
"""
898 n_edges = len(mesh.edges)
899 edges = [0]*n_edges*2
900 mesh.edges.foreach_get('vertices', edges)
901 edges = np.array(edges).reshape((n_edges,2)).astype('int')
902 return edges
904 def get_edges_id_numpy(mesh):
905 n_edges = len(mesh.edges)
906 edges = [0]*n_edges*2
907 mesh.edges.foreach_get('vertices', edges)
908 edges = np.array(edges).reshape((n_edges,2))
909 indexes = np.arange(n_edges).reshape((n_edges,1))
910 edges = np.concatenate((edges,indexes), axis=1)
911 return edges
913 def get_polygons_select_numpy(mesh):
914 n_polys = len(mesh.polygons)
selections = [0]*n_polys
916 mesh.polygons.foreach_get('select', selections)
917 selections = np.array(selections)
918 return selections
920 def get_attribute_numpy(elements_list, attribute='select', mult=1):
"""
Generate a numpy array getting an attribute from a list of elements using
the foreach_get() function.
"""
925 n_elements = len(elements_list)
926 values = [0]*n_elements*mult
927 elements_list.foreach_get(attribute, values)
928 values = np.array(values)
929 if mult > 1: values = values.reshape((n_elements,mult))
930 return values
932 def get_vertices(mesh):
933 n_verts = len(mesh.vertices)
934 verts = [0]*n_verts*3
935 mesh.vertices.foreach_get('co', verts)
936 verts = np.array(verts).reshape((n_verts,3))
937 verts = [Vector(v) for v in verts]
938 return verts
940 def get_faces(mesh):
941 faces = [[v for v in f.vertices] for f in mesh.polygons]
942 return faces
944 def get_faces_numpy(mesh):
945 faces = [[v for v in f.vertices] for f in mesh.polygons]
946 return np.array(faces)
948 def get_faces_edges_numpy(mesh):
faces = [f.edge_keys for f in mesh.polygons]
950 return np.array(faces)
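# find_curves walks the edge list as a vertex-adjacency graph and chains the
# edges into polylines: every returned curve is a list of vertex indices, and
# closed curves repeat the first index at the end.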
952 def find_curves(edges, n_verts):
953 verts_dict = {key:[] for key in range(n_verts)}
954 for e in edges:
955 verts_dict[e[0]].append(e[1])
956 verts_dict[e[1]].append(e[0])
957 curves = []
958 while True:
959 if len(verts_dict) == 0: break
960 # next starting point
961 v = list(verts_dict.keys())[0]
962 # neighbors
963 v01 = verts_dict[v]
964 if len(v01) == 0:
965 verts_dict.pop(v)
966 continue
967 curve = []
968 if len(v01) > 1: curve.append(v01[1]) # add neighbors
969 curve.append(v) # add starting point
970 curve.append(v01[0]) # add neighbors
971 verts_dict.pop(v)
972 # start building curve
973 while True:
974 #last_point = curve[-1]
975 #if last_point not in verts_dict: break
977 # try to change direction if needed
978 if curve[-1] in verts_dict: pass
979 elif curve[0] in verts_dict: curve.reverse()
980 else: break
982 # neighbors points
983 last_point = curve[-1]
984 v01 = verts_dict[last_point]
986 # curve end
987 if len(v01) == 1:
988 verts_dict.pop(last_point)
989 if curve[0] in verts_dict: continue
990 else: break
992 # chose next point
993 new_point = None
994 if v01[0] == curve[-2]: new_point = v01[1]
995 elif v01[1] == curve[-2]: new_point = v01[0]
996 #else: break
998 #if new_point != curve[1]:
999 curve.append(new_point)
1000 verts_dict.pop(last_point)
1001 if curve[0] == curve[-1]:
1002 verts_dict.pop(new_point)
1003 break
1004 curves.append(curve)
1005 return curves
1007 def curve_from_points(points, name='Curve'):
1008 curve = bpy.data.curves.new(name,'CURVE')
1009 for c in points:
1010 s = curve.splines.new('POLY')
1011 s.points.add(len(c))
1012 for i,p in enumerate(c): s.points[i].co = p.xyz + [1]
1013 ob_curve = bpy.data.objects.new(name,curve)
1014 return ob_curve
1016 def curve_from_pydata(points, radii, indexes, name='Curve', skip_open=False, merge_distance=1, set_active=True, only_data=False):
1017 curve = bpy.data.curves.new(name,'CURVE')
1018 curve.dimensions = '3D'
1019 use_rad = True
1020 for c in indexes:
1021 bool_cyclic = c[0] == c[-1]
1022 if bool_cyclic: c.pop(-1)
1023 # cleanup
1024 pts = np.array([points[i] for i in c])
1025 try:
1026 rad = np.array([radii[i] for i in c])
1027 except:
1028 use_rad = False
1029 rad = 1
1030 if merge_distance > 0:
1031 pts1 = np.roll(pts,1,axis=0)
1032 dist = np.linalg.norm(pts1-pts, axis=1)
1033 count = 0
1034 n = len(dist)
1035 mask = np.ones(n).astype('bool')
1036 for i in range(n):
1037 count += dist[i]
1038 if count > merge_distance: count = 0
1039 else: mask[i] = False
1040 pts = pts[mask]
1041 if use_rad: rad = rad[mask]
1043 if skip_open and not bool_cyclic: continue
1044 s = curve.splines.new('POLY')
1045 n_pts = len(pts)
1046 s.points.add(n_pts-1)
1047 w = np.ones(n_pts).reshape((n_pts,1))
1048 co = np.concatenate((pts,w),axis=1).reshape((n_pts*4))
1049 s.points.foreach_set('co',co)
1050 if use_rad: s.points.foreach_set('radius',rad)
1051 s.use_cyclic_u = bool_cyclic
1052 if only_data:
1053 return curve
1054 else:
1055 ob_curve = bpy.data.objects.new(name,curve)
1056 bpy.context.collection.objects.link(ob_curve)
1057 if set_active:
1058 bpy.context.view_layer.objects.active = ob_curve
1059 return ob_curve
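# Illustrative usage (sketch, values are just examples): build a poly curve
# object from three points with per-point radii, skipping the merge cleanup.
#   pts = [(0,0,0), (1,0,0), (1,1,0)]
#   ob = curve_from_pydata(pts, [0.1, 0.2, 0.1], [[0, 1, 2]], name='Example',
#                          merge_distance=0)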
1061 def update_curve_from_pydata(curve, points, normals, radii, indexes, merge_distance=1, pattern=[1,0], depth=0.1, offset=0):
1062 curve.splines.clear()
1063 use_rad = True
1064 for ic, c in enumerate(indexes):
1065 bool_cyclic = c[0] == c[-1]
1066 if bool_cyclic: c.pop(-1)
1068 # cleanup
1069 pts = np.array([points[i] for i in c if i != None])
1070 nor = np.array([normals[i] for i in c if i != None])
1071 try:
1072 rad = np.array([radii[i] for i in c if i != None])
1073 except:
1074 use_rad = False
1075 rad = 1
1076 if merge_distance > 0:
1077 pts1 = np.roll(pts,1,axis=0)
1078 dist = np.linalg.norm(pts1-pts, axis=1)
1079 count = 0
1080 n = len(dist)
1081 mask = np.ones(n).astype('bool')
1082 for i in range(n):
1083 count += dist[i]
1084 if count > merge_distance: count = 0
1085 else: mask[i] = False
1086 pts = pts[mask]
1087 nor = nor[mask]
1088 if use_rad: rad = rad[mask]
1089 #if skip_open and not bool_cyclic: continue
1090 n_pts = len(pts)
1091 series = np.arange(n_pts)
1092 patt1 = series + (series-series%pattern[1])/pattern[1]*pattern[0]+pattern[0]
1093 patt1 = patt1[patt1<n_pts].astype('int')
1094 patt0 = series + (series-series%pattern[0])/pattern[0]*pattern[1]
1095 patt0 = patt0[patt0<n_pts].astype('int')
1096 nor[patt0] *= 0.5*depth*(1 + offset)
1097 nor[patt1] *= 0.5*depth*(-1 + offset)
1098 if pattern[0]*pattern[1] != 0: pts += nor
1099 s = curve.splines.new('POLY')
1100 s.points.add(n_pts-1)
1101 w = np.ones(n_pts).reshape((n_pts,1))
1102 co = np.concatenate((pts,w),axis=1).reshape((n_pts*4))
1103 s.points.foreach_set('co',co)
1104 if use_rad: s.points.foreach_set('radius',rad)
1105 s.use_cyclic_u = bool_cyclic
1107 def loops_from_bmesh(edges):
"""
Return one or more loops given some starting edges.
:arg edges: Edges used as seeds.
:type edges: List of :class:`bmesh.types.BMEdge`
:return: Elements in each loop (Verts, Edges), where:
    - Verts - List of Lists of :class:`bmesh.types.BMVert`
    - Edges - List of Lists of :class:`bmesh.types.BMEdge`
:rtype: tuple
"""
1117 todo_edges = list(edges)
1118 #todo_edges = [e.index for e in bm.edges]
1119 vert_loops = []
1120 edge_loops = []
1121 while len(todo_edges) > 0:
1122 edge = todo_edges[0]
1123 vert_loop, edge_loop = run_edge_loop(edge)
1124 for e in edge_loop:
1125 try: todo_edges.remove(e)
1126 except: pass
1127 edge_loops.append(edge_loop)
1128 vert_loops.append(vert_loop)
1129 #if len(todo_edges) == 0: break
1130 return vert_loops, edge_loops
1132 def run_edge_loop_direction(edge,vert):
"""
Return vertices and edges along a loop in a specific direction.
:arg edge: Edge used as seed.
:type edge: :class:`bmesh.types.BMEdge`
:arg vert: Vertex of the Edge used for the direction.
:type vert: :class:`bmesh.types.BMVert`
:return: Elements in the loop (Verts, Edges), where:
    - Verts - List of :class:`bmesh.types.BMVert`
    - Edges - List of :class:`bmesh.types.BMEdge`
:rtype: tuple
"""
1144 edge0 = edge
1145 edge_loop = [edge]
1146 vert_loop = [vert]
1147 while True:
1148 link_edges = list(vert.link_edges)
1149 link_edges.remove(edge)
1150 n_edges = len(link_edges)
1151 if n_edges == 1:
1152 edge = link_edges[0]
1153 elif n_edges < 4:
1154 link_faces = edge.link_faces
1155 if len(link_faces) == 0: break
1156 edge = None
1157 for e in link_edges:
1158 link_faces1 = e.link_faces
1159 if len(link_faces) == len(link_faces1):
1160 common_faces = [f for f in link_faces1 if f in link_faces]
1161 if len(common_faces) == 0:
1162 edge = e
1163 break
1164 else: break
1165 if edge == None: break
1166 edge_loop.append(edge)
1167 vert = edge.other_vert(vert)
1168 vert_loop.append(vert)
1169 if edge == edge0: break
1170 return vert_loop, edge_loop
1172 def run_edge_loop(edge):
"""
Return vertices and edges along a loop in both directions.
:arg edge: Edge used as seed.
:type edge: :class:`bmesh.types.BMEdge`
:return: Elements in the loop (Verts, Edges), where:
    - Verts - List of :class:`bmesh.types.BMVert`
    - Edges - List of :class:`bmesh.types.BMEdge`
:rtype: tuple
"""
1182 vert0 = edge.verts[0]
1183 vert_loop0, edge_loop0 = run_edge_loop_direction(edge, vert0)
1184 if len(edge_loop0) == 1 or edge_loop0[0] != edge_loop0[-1]:
1185 vert1 = edge.verts[1]
1186 vert_loop1, edge_loop1 = run_edge_loop_direction(edge, vert1)
1187 edge_loop0.reverse()
1188 vert_loop0.reverse()
1189 edge_loop = edge_loop0[:-1] + edge_loop1
1190 vert_loop = vert_loop0 + vert_loop1
1191 else:
1192 edge_loop = edge_loop0[1:]
1193 vert_loop = vert_loop0
1194 return vert_loop, edge_loop
1196 def curve_from_vertices(indexes, verts, name='Curve'):
"""
Curve data from given vertices.
:arg indexes: List of Lists of indexes of the vertices.
:type indexes: List of Lists of int
:arg verts: List of vertices.
:type verts: List of :class:`bpy.types.MeshVertex`
:arg name: Name of the Curve data.
:type name: str
:return: Generated Curve data
:rtype: :class:`bpy.types.Curve`
"""
1208 curve = bpy.data.curves.new(name,'CURVE')
1209 for c in indexes:
1210 s = curve.splines.new('POLY')
1211 s.points.add(len(c))
1212 for i,p in enumerate(c):
1213 s.points[i].co = verts[p].co.xyz + [1]
1214 #s.points[i].tilt = degrees(asin(verts[p].co.z))
1215 ob_curve = bpy.data.objects.new(name,curve)
1216 return ob_curve
1218 def nurbs_from_vertices(indexes, co, radii=[], name='Curve', set_active=True, interpolation='POLY'):
1219 curve = bpy.data.curves.new(name,'CURVE')
1220 curve.dimensions = '3D'
1221 curve.resolution_u = 2
1222 curve.bevel_depth = 0.01
1223 curve.bevel_resolution = 0
1224 for pts in indexes:
1225 s = curve.splines.new(interpolation)
1226 n_pts = len(pts)
1227 s.points.add(n_pts-1)
1228 w = np.ones(n_pts).reshape((n_pts,1))
1229 curve_co = np.concatenate((co[pts],w),axis=1).reshape((n_pts*4))
1230 s.points.foreach_set('co',curve_co)
1231 try:
1232 s.points.foreach_set('radius',radii[pts])
1233 except: pass
1234 s.use_endpoint_u = True
1236 ob_curve = bpy.data.objects.new(name,curve)
1237 bpy.context.collection.objects.link(ob_curve)
1238 if set_active:
1239 bpy.context.view_layer.objects.active = ob_curve
1240 ob_curve.select_set(True)
1241 return ob_curve
1243 # ------------------------------------------------------------------
1244 # VERTEX GROUPS AND WEIGHT
1245 # ------------------------------------------------------------------
1247 def get_weight(vertex_group, n_verts):
"""
Read weight values from given Vertex Group.
:arg vertex_group: Vertex Group.
:type vertex_group: :class:`bpy.types.VertexGroup`
:arg n_verts: Number of Vertices (output list size).
:type n_verts: int
:return: Read weight values.
:rtype: list
"""
1257 weight = [0]*n_verts
1258 for i in range(n_verts):
1259 try: weight[i] = vertex_group.weight(i)
1260 except: pass
1261 return weight
1263 def get_weight_numpy(vertex_group, n_verts):
"""
Read weight values from given Vertex Group.
:arg vertex_group: Vertex Group.
:type vertex_group: :class:`bpy.types.VertexGroup`
:arg n_verts: Number of Vertices (output list size).
:type n_verts: int
:return: Read weight values as numpy array.
:rtype: :class:`numpy.ndarray`
"""
1273 weight = [0]*n_verts
1274 for i in range(n_verts):
1275 try: weight[i] = vertex_group.weight(i)
1276 except: pass
1277 return np.array(weight)
1279 def bmesh_get_weight_numpy(group_index, layer, verts):
1280 weight = np.zeros(len(verts))
1281 for i, v in enumerate(verts):
1282 dvert = v[layer]
1283 if group_index in dvert:
1284 weight[i] = dvert[group_index]
1285 #dvert[group_index] = 0.5
1286 return weight
1288 def bmesh_set_weight_numpy(group_index, layer, verts, weight):
1289 for i, v in enumerate(verts):
1290 dvert = v[layer]
1291 if group_index in dvert:
1292 dvert[group_index] = weight[i]
1293 return verts
1295 def bmesh_set_weight_numpy(bm, group_index, weight):
1296 layer = bm.verts.layers.deform.verify()
1297 for i, v in enumerate(bm.verts):
1298 dvert = v[layer]
1299 #if group_index in dvert:
1300 dvert[group_index] = weight[i]
1301 return bm
1303 def set_weight_numpy(vg, weight):
1304 for i, w in enumerate(weight):
1305 vg.add([i], w, 'REPLACE')
1306 return vg
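# Illustrative sketch (assumes the active object has a vertex group named
# 'Group'): read the weights into numpy, remap them and write them back.
#   ob = bpy.context.object
#   vg = ob.vertex_groups['Group']
#   w = get_weight_numpy(vg, len(ob.data.vertices))
#   set_weight_numpy(vg, np.clip(w * 2, 0, 1))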
1308 def uv_from_bmesh(bm, uv_index=None):
1309 if uv_index:
1310 uv_lay = bm.loops.layers.uv[uv_index]
1311 else:
1312 uv_lay = bm.loops.layers.uv.active
1313 uv_co = [0]*len(bm.verts)
1315 for face in bm.faces:
1316 for vert,loop in zip(face.verts, face.loops):
1317 uv_co[vert.index] = loop[uv_lay].uv
1318 return uv_co
1320 def get_uv_edge_vectors(me, uv_map = 0, only_positive=False):
1321 count = 0
1322 uv_vectors = {}
1323 for i, f in enumerate(me.polygons):
1324 f_verts = len(f.vertices)
1325 for j0 in range(f_verts):
1326 j1 = (j0+1)%f_verts
1327 uv0 = me.uv_layers[uv_map].data[count+j0].uv
1328 uv1 = me.uv_layers[uv_map].data[count+j1].uv
1329 delta_uv = (uv1-uv0).normalized()
1330 if only_positive:
1331 delta_uv.x = abs(delta_uv.x)
1332 delta_uv.y = abs(delta_uv.y)
1333 edge_key = tuple(sorted([f.vertices[j0], f.vertices[j1]]))
1334 uv_vectors[edge_key] = delta_uv
1335 count += f_verts
1336 uv_vectors = [uv_vectors[tuple(sorted(e.vertices))] for e in me.edges]
1337 return uv_vectors
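# mesh_diffusion smooths per-vertex values with a simple graph-Laplacian
# diffusion over the mesh edges; when uv_dir is not 0 the contribution of each
# edge is weighted by how much its UV direction matches the chosen axis.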
1339 def mesh_diffusion(me, values, iter, diff=0.2, uv_dir=0):
1340 values = np.array(values)
1341 n_verts = len(me.vertices)
1343 n_edges = len(me.edges)
1344 edge_verts = [0]*n_edges*2
1345 #me.edges.foreach_get("vertices", edge_verts)
1347 count = 0
1348 edge_verts = []
1349 uv_factor = {}
1350 uv_ang = (0.5 + uv_dir*0.5)*pi/2
1351 uv_vec = Vector((cos(uv_ang), sin(uv_ang)))
1352 for i, f in enumerate(me.polygons):
1353 f_verts = len(f.vertices)
1354 for j0 in range(f_verts):
1355 j1 = (j0+1)%f_verts
1356 if uv_dir != 0:
1357 uv0 = me.uv_layers[0].data[count+j0].uv
1358 uv1 = me.uv_layers[0].data[count+j1].uv
1359 delta_uv = (uv1-uv0).normalized()
1360 delta_uv.x = abs(delta_uv.x)
1361 delta_uv.y = abs(delta_uv.y)
1362 dir = uv_vec.dot(delta_uv)
1363 else:
1364 dir = 1
1365 #dir = abs(dir)
1366 #uv_factor.append(dir)
1367 edge_key = [f.vertices[j0], f.vertices[j1]]
1368 edge_key.sort()
1369 uv_factor[tuple(edge_key)] = dir
1370 count += f_verts
1371 id0 = []
1372 id1 = []
1373 uv_mult = []
1374 for ek, val in uv_factor.items():
1375 id0.append(ek[0])
1376 id1.append(ek[1])
1377 uv_mult.append(val)
1378 id0 = np.array(id0)
1379 id1 = np.array(id1)
1380 uv_mult = np.array(uv_mult)
1382 #edge_verts = np.array(edge_verts)
1383 #arr = np.arange(n_edges)*2
1385 #id0 = edge_verts[arr] # first vertex indices for each edge
1386 #id1 = edge_verts[arr+1] # second vertex indices for each edge
1387 for ii in range(iter):
1388 lap = np.zeros(n_verts)
1389 if uv_dir != 0:
1390 lap0 = (values[id1] - values[id0])*uv_mult # laplacian increment for first vertex of each edge
1391 else:
1392 lap0 = (values[id1] - values[id0])
1393 np.add.at(lap, id0, lap0)
1394 np.add.at(lap, id1, -lap0)
1395 values += diff*lap
1396 return values
1398 def mesh_diffusion_vector(me, vectors, iter, diff, uv_dir=0):
1399 vectors = np.array(vectors)
1400 x = vectors[:,0]
1401 y = vectors[:,1]
1402 z = vectors[:,2]
1403 x = mesh_diffusion(me, x, iter, diff, uv_dir)
1404 y = mesh_diffusion(me, y, iter, diff, uv_dir)
1405 z = mesh_diffusion(me, z, iter, diff, uv_dir)
1406 vectors[:,0] = x
1407 vectors[:,1] = y
1408 vectors[:,2] = z
1409 return vectors
1411 # ------------------------------------------------------------------
1412 # MODIFIERS
1413 # ------------------------------------------------------------------
1415 def mod_preserve_topology(mod):
1416 same_topology_modifiers = ('DATA_TRANSFER','NORMAL_EDIT','WEIGHTED_NORMAL',
1417 'UV_PROJECT','UV_WARP','VERTEX_WEIGHT_EDIT','VERTEX_WEIGHT_MIX',
1418 'VERTEX_WEIGHT_PROXIMITY','ARMATURE','CAST','CURVE','DISPLACE','HOOK',
1419 'LAPLACIANDEFORM','LATTICE','MESH_DEFORM','SHRINKWRAP','SIMPLE_DEFORM',
1420 'SMOOTH','CORRECTIVE_SMOOTH','LAPLACIANSMOOTH','SURFACE_DEFORM','WARP',
'WAVE','CLOTH','COLLISION','DYNAMIC_PAINT','SOFT_BODY'
)
1423 return mod.type in same_topology_modifiers
1425 def mod_preserve_shape(mod):
1426 same_shape_modifiers = ('DATA_TRANSFER','NORMAL_EDIT','WEIGHTED_NORMAL',
1427 'UV_PROJECT','UV_WARP','VERTEX_WEIGHT_EDIT','VERTEX_WEIGHT_MIX',
'VERTEX_WEIGHT_PROXIMITY','DYNAMIC_PAINT'
)
1430 return mod.type in same_shape_modifiers
1433 def recurLayerCollection(layerColl, collName):
"""
Recursively traverse layer_collection for a particular name.
"""
1437 found = None
1438 if (layerColl.name == collName):
1439 return layerColl
1440 for layer in layerColl.children:
1441 found = recurLayerCollection(layer, collName)
1442 if found:
1443 return found
1445 def auto_layer_collection():
"""
Automatically change active layer collection.
"""
1449 layer = bpy.context.view_layer.active_layer_collection
1450 layer_collection = bpy.context.view_layer.layer_collection
1451 if layer.hide_viewport or layer.collection.hide_viewport:
1452 collections = bpy.context.object.users_collection
1453 for c in collections:
1454 lc = recurLayerCollection(layer_collection, c.name)
1455 if not c.hide_viewport and not lc.hide_viewport:
1456 bpy.context.view_layer.active_layer_collection = lc