# SPDX-FileCopyrightText: 2019-2023 Blender Foundation

# SPDX-License-Identifier: GPL-2.0-or-later

import bpy, bmesh
import threading
import numpy as np
import multiprocessing
from multiprocessing import Process, Pool
from mathutils import Vector, Matrix
from math import *
try: from .numba_functions import *
except: pass

from . import config
def use_numba_tess():
    tissue_addon = bpy.context.preferences.addons[__package__]
    if 'use_numba_tess' in tissue_addon.preferences.keys():
        return tissue_addon.preferences['use_numba_tess']
    else:
        return True
def tissue_time(start_time, name, levels=0):
    tissue_addon = bpy.context.preferences.addons[__package__]
    end_time = time.time()
    if 'print_stats' in tissue_addon.preferences.keys():
        ps = tissue_addon.preferences['print_stats']
    else:
        ps = 1
    if levels < ps:
        if "Tissue: " in name: head = ""
        else: head = " "
        if start_time:
            print('{}{}{} in {:.4f} sec'.format(head, "| "*levels, name, end_time - start_time))
        else:
            print('{}{}{}'.format(head, "| "*levels, name))
    return end_time
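# Illustrative usage of the timing helper above (the labels are hypothetical,
# not taken from the add-on):
#   t = tissue_time(None, "Tissue: Tessellating", levels=0)  # prints the label
#   ...
#   tissue_time(t, "Done", levels=1)  # prints the elapsed time for the level-1 step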
# ------------------------------------------------------------------
# MATH
# ------------------------------------------------------------------
def _np_broadcast(arrays):
    shapes = [arr.shape for arr in arrays]
    for i in range(len(shapes[0])):
        ish = [sh[i] for sh in shapes]
        max_len = max(ish)
        for j in range(len(arrays)):
            leng = ish[j]
            if leng == 1: arrays[j] = np.repeat(arrays[j], max_len, axis=i)
    # flatten the broadcast arrays (the original loop only reassigned the loop
    # variable, which had no effect)
    arrays = [arr.flatten() for arr in arrays]
    #vt = v0 + (v1 - v0) * t
    return arrays
def lerp(a, b, t):
    return a + (b - a) * t
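# Quick sanity check for the helper above (illustrative numbers):
#   lerp(0.0, 10.0, 0.25) -> 2.5
# The same expression also works with mathutils Vectors and NumPy arrays,
# since it only relies on +, - and *.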
def _lerp2(v1, v2, v3, v4, v):
    v12 = v1.lerp(v2,v.x) # + (v2 - v1) * v.x
    v34 = v3.lerp(v4,v.x) # + (v4 - v3) * v.x
    return v12.lerp(v34, v.y) # + (v34 - v12) * v.y

def lerp2(v1, v2, v3, v4, v):
    v12 = v1 + (v2 - v1) * v.x
    v34 = v3 + (v4 - v3) * v.x
    v = v12 + (v34 - v12) * v.y
    return v

def lerp3(v1, v2, v3, v4, v):
    loc = lerp2(v1.co, v2.co, v3.co, v4.co, v)
    nor = lerp2(v1.normal, v2.normal, v3.normal, v4.normal, v)
    nor.normalize()
    return loc + nor * v.z
import sys

def np_lerp2(v00, v10, v01, v11, vx, vy, mode=''):
    if 'numba' in sys.modules and use_numba_tess():
        if mode == 'verts':
            co2 = numba_interp_points(v00, v10, v01, v11, vx, vy)
        elif mode == 'shapekeys':
            co2 = numba_interp_points_sk(v00, v10, v01, v11, vx, vy)
        else:
            co2 = numba_lerp2(v00, v10, v01, v11, vx, vy)
    else:
        co0 = v00 + (v10 - v00) * vx
        co1 = v01 + (v11 - v01) * vx
        co2 = co0 + (co1 - co0) * vy
    return co2
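# Hedged example of the pure-NumPy fallback branch of np_lerp2 (shapes are
# illustrative, not prescribed by the add-on):
#   np_lerp2(np.array([0.]), np.array([1.]),
#            np.array([2.]), np.array([3.]), 0.5, 0.5) -> array([1.5])
# The numba variants are expected to return the same bilinear blend.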
def calc_thickness(co2,n2,vz,a,weight):
    if 'numba' in sys.modules and use_numba_tess():
        if len(co2.shape) == 3:
            if type(a) != np.ndarray:
                a = np.ones(len(co2)).reshape((-1,1,1))
            if type(weight) != np.ndarray:
                weight = np.ones(len(co2)).reshape((-1,1,1))
            co3 = numba_calc_thickness_area_weight(co2,n2,vz,a,weight)
        elif len(co2.shape) == 4:
            n_patches = co2.shape[0]
            n_sk = co2.shape[1]
            n_verts = co2.shape[2]
            if type(a) != np.ndarray:
                a = np.ones(n_patches).reshape((n_patches,1,1,1))
            if type(weight) != np.ndarray:
                weight = np.ones(n_patches).reshape((n_patches,1,1,1))
            na = a.shape[1]-1
            nw = weight.shape[1]-1
            co3 = np.empty((n_sk,n_patches,n_verts,3))
            for i in range(n_sk):
                co3[i] = numba_calc_thickness_area_weight(co2[:,i],n2[:,i],vz[:,i],a[:,min(i,na)],weight[:,min(i,nw)])
            co3 = co3.swapaxes(0,1)
    else:
        use_area = type(a) == np.ndarray
        use_weight = type(weight) == np.ndarray
        if use_area:
            if use_weight:
                co3 = co2 + n2 * vz * a * weight
            else:
                co3 = co2 + n2 * vz * a
        else:
            if use_weight:
                co3 = co2 + n2 * vz * weight
            else:
                co3 = co2 + n2 * vz
    return co3

def combine_and_flatten(arrays):
    if 'numba' in sys.modules:
        new_list = numba_combine_and_flatten(arrays)
    else:
        new_list = np.concatenate(arrays, axis=0)
        new_list = new_list.flatten().tolist()
    return new_list
def np_interp2(grid, vx, vy):
    grid_shape = grid.shape[-2:]
    levels = len(grid.shape)-2
    nu = grid_shape[0]
    nv = grid_shape[1]
    u = np.arange(nu)/(nu-1)
    v = np.arange(nv)/(nv-1)
    u_shape = [1]*levels + [nu]
    v_shape = [1]*levels + [nv]
    # NOTE: unfinished helper. The original body called np.interp() with no
    # arguments, which can only raise a TypeError; the grid interpolation was
    # never implemented, so fail explicitly instead.
    raise NotImplementedError("np_interp2: grid interpolation is not implemented")
def flatten_vector(vec, x, y):
    """
    Find planar vector according to two axis.
    :arg vec: Input vector.
    :type vec: :class:'mathutils.Vector'
    :arg x: First axis.
    :type x: :class:'mathutils.Vector'
    :arg y: Second axis.
    :type y: :class:'mathutils.Vector'
    :return: Projected 2D Vector.
    :rtype: :class:'mathutils.Vector'
    """
    vx = vec.project(x)
    vy = vec.project(y)
    mult = 1 if vx.dot(x) > 0 else -1
    vx = mult*vx.length
    mult = 1 if vy.dot(y) > 0 else -1
    vy = mult*vy.length
    return Vector((vx, vy))
def vector_rotation(vec):
    """
    Find vector rotation according to X axis.
    :arg vec: Input vector.
    :type vec: :class:'mathutils.Vector'
    :return: Angle in radians.
    :rtype: float
    """
    v0 = Vector((1,0))
    ang = Vector.angle_signed(vec, v0)
    if ang < 0: ang = 2*pi + ang
    return ang
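# Examples for vector_rotation (exact values, independent of the sign
# convention used by angle_signed):
#   vector_rotation(Vector((1, 0)))  -> 0.0
#   vector_rotation(Vector((-1, 0))) -> pi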
# ------------------------------------------------------------------
# SCENE
# ------------------------------------------------------------------

def set_animatable_fix_handler(self, context):
    """
    Prevent Blender Crashes with handlers
    """
    old_handlers = []
    blender_handlers = bpy.app.handlers.render_init
    for h in blender_handlers:
        if "turn_off_animatable" in str(h):
            old_handlers.append(h)
    for h in old_handlers: blender_handlers.remove(h)
    blender_handlers.append(turn_off_animatable)
    return

def turn_off_animatable(scene):
    """
    Prevent Blender Crashes with handlers
    """
    for o in [o for o in bpy.data.objects if o.type == 'MESH']:
        o.tissue_tessellate.bool_run = False
        #if not o.reaction_diffusion_settings.bool_cache:
        #    o.reaction_diffusion_settings.run = False
        #except: pass
    return

# ------------------------------------------------------------------
# OBJECTS
# ------------------------------------------------------------------
def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True):
    try: ob.name
    except: return None
    if ob.type != 'MESH':
        if not apply_modifiers:
            mod_visibility = [m.show_viewport for m in ob.modifiers]
            for m in ob.modifiers: m.show_viewport = False
        #ob.modifiers.update()
        #dg = bpy.context.evaluated_depsgraph_get()
        #ob_eval = ob.evaluated_get(dg)
        #me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)
        me = simple_to_mesh(ob)
        new_ob = bpy.data.objects.new(ob.data.name, me)
        new_ob.location, new_ob.matrix_world = ob.location, ob.matrix_world
        if not apply_modifiers:
            for m,vis in zip(ob.modifiers,mod_visibility): m.show_viewport = vis
    else:
        if apply_modifiers:
            new_ob = ob.copy()
            new_me = simple_to_mesh(ob)
            new_ob.modifiers.clear()
            new_ob.data = new_me
        else:
            new_ob = ob.copy()
            new_ob.data = ob.data.copy()
            new_ob.modifiers.clear()
    bpy.context.collection.objects.link(new_ob)
    if preserve_status:
        new_ob.select_set(False)
    else:
        for o in bpy.context.view_layer.objects: o.select_set(False)
        new_ob.select_set(True)
        bpy.context.view_layer.objects.active = new_ob
    return new_ob
def simple_to_mesh(ob, depsgraph=None):
    """
    Convert object to mesh applying Modifiers and Shape Keys
    """
    #global evaluatedDepsgraph
    if depsgraph == None:
        if config.evaluatedDepsgraph == None:
            dg = bpy.context.evaluated_depsgraph_get()
        else: dg = config.evaluatedDepsgraph
    else:
        dg = depsgraph
    ob_eval = ob.evaluated_get(dg)
    me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)
    return me
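# Typical usage (illustrative): the returned datablock is a new mesh and is not
# freed automatically.
#   me = simple_to_mesh(bpy.context.object)
#   ...
#   bpy.data.meshes.remove(me)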
def _join_objects(context, objects, link_to_scene=True, make_active=True):
    C = context
    bm = bmesh.new()

    materials = {}
    faces_materials = []
    if config.evaluatedDepsgraph == None:
        dg = C.evaluated_depsgraph_get()
    else: dg = config.evaluatedDepsgraph

    for o in objects:
        bm.from_object(o, dg)
        # add object's material to the dictionary
        for m in o.data.materials:
            if m not in materials: materials[m] = len(materials)
        for f in o.data.polygons:
            index = f.material_index
            mat = o.material_slots[index].material
            new_index = materials[mat]
            faces_materials.append(new_index)
    bm.verts.ensure_lookup_table()
    bm.edges.ensure_lookup_table()
    bm.faces.ensure_lookup_table()
    # assign new indexes
    for index, f in zip(faces_materials, bm.faces): f.material_index = index
    # create object
    me = bpy.data.meshes.new('joined')
    bm.to_mesh(me)
    me.update()
    ob = bpy.data.objects.new('joined', me)
    if link_to_scene: C.collection.objects.link(ob)
    # make active
    if make_active:
        for o in C.view_layer.objects: o.select_set(False)
        ob.select_set(True)
        C.view_layer.objects.active = ob
    # add materials
    for m in materials.keys(): ob.data.materials.append(m)

    return ob

def join_objects(context, objects):
    generated_data = [o.data for o in objects]
    context.view_layer.update()
    for o in context.view_layer.objects:
        o.select_set(o in objects)
    bpy.ops.object.join()
    new_ob = context.view_layer.objects.active
    new_ob.select_set(True)
    for me in generated_data:
        if me != new_ob.data:
            bpy.data.meshes.remove(me)
    return new_ob
# NOTE: this second definition overrides join_objects() above and relies on the
# legacy dictionary context override, which is deprecated in newer Blender versions.
def join_objects(objects):
    override = bpy.context.copy()
    new_ob = objects[0]
    override['active_object'] = new_ob
    override['selected_editable_objects'] = objects
    bpy.ops.object.join(override)
    return new_ob
def repeat_mesh(me, n):
    """
    Return Mesh data adding and applying an array without offset (Slower)
    """
    bm = bmesh.new()
    for i in range(n): bm.from_mesh(me)
    new_me = me.copy()
    bm.to_mesh(new_me)
    bm.free()
    return new_me

def array_mesh(ob, n):
    """
    Return Mesh data adding and applying an array without offset
    """
    arr = ob.modifiers.new('Repeat','ARRAY')
    arr.relative_offset_displace[0] = 0
    arr.count = n
    # with bpy.context.temp_override(active_object=ob):
    #     bpy.ops.object.modifier_apply(modifier='Repeat')
    # me = ob.data
    ob.modifiers.update()

    dg = bpy.context.evaluated_depsgraph_get()
    me = simple_to_mesh(ob, depsgraph=dg)
    ob.modifiers.remove(arr)
    return me
def array_mesh_object(ob, n):
    """
    Return Mesh data adding and applying an array without offset
    """
    arr = ob.modifiers.new('Repeat','ARRAY')
    arr.relative_offset_displace[0] = 0
    arr.count = n
    ob.modifiers.update()
    override = bpy.context.copy()
    override['active_object'] = ob
    # the full context copy above is discarded: only the active object is overridden
    override = {'active_object': ob}
    bpy.ops.object.modifier_apply(override, modifier=arr.name)
    return ob
def get_mesh_before_subs(ob):
    not_allowed = ('FLUID_SIMULATION', 'ARRAY', 'BEVEL', 'BOOLEAN', 'BUILD',
                   'DECIMATE', 'EDGE_SPLIT', 'MASK', 'MIRROR', 'REMESH',
                   'SCREW', 'SOLIDIFY', 'TRIANGULATE', 'WIREFRAME', 'SKIN',
                   'EXPLODE', 'PARTICLE_INSTANCE', 'PARTICLE_SYSTEM', 'SMOKE')
    subs = 0
    hide_mods = []
    mods_visibility = []
    for m in ob.modifiers:
        hide_mods.append(m)
        mods_visibility.append(m.show_viewport)
        if m.type in ('SUBSURF','MULTIRES'):
            hide_mods = [m]
            subs = m.levels
        elif m.type in not_allowed:
            subs = 0
            hide_mods = []
            mods_visibility = []
    for m in hide_mods: m.show_viewport = False
    me = simple_to_mesh(ob)
    for m, vis in zip(hide_mods,mods_visibility): m.show_viewport = vis
    return me, subs

# ------------------------------------------------------------------
# MESH FUNCTIONS
# ------------------------------------------------------------------
def calc_verts_area(me):
    # NOTE: assumes a quad-only mesh (4 vertex indices per polygon)
    n_verts = len(me.vertices)
    n_faces = len(me.polygons)
    vareas = np.zeros(n_verts)
    vcount = np.zeros(n_verts)
    parea = [0]*n_faces
    pverts = [0]*n_faces*4
    me.polygons.foreach_get('area', parea)
    me.polygons.foreach_get('vertices', pverts)
    parea = np.array(parea)
    pverts = np.array(pverts).reshape((n_faces, 4))
    for a, verts in zip(parea,pverts):
        vareas[verts] += a
        vcount[verts] += 1
    return vareas / vcount
def calc_verts_area_bmesh(me):
    bm = bmesh.new()
    bm.from_mesh(me)
    bm.verts.ensure_lookup_table()
    verts_area = np.zeros(len(me.vertices))
    for v in bm.verts:
        area = 0
        faces = v.link_faces
        for f in faces:
            area += f.calc_area()
        verts_area[v.index] = area if area == 0 else area/len(faces)
    bm.free()
    return verts_area

import time
def get_patches____(me_low, me_high, sides, subs, bool_selection, bool_material_id, material_id):
    nv = len(me_low.vertices)   # number of vertices
    ne = len(me_low.edges)      # number of edges
    nf = len(me_low.polygons)   # number of polygons
    n = 2**subs + 1             # number of vertices along each patch edge
    nev = ne * n                # number of vertices along the subdivided edges
    nevi = nev - 2*ne           # internal vertices along subdivided edges

    n0 = 2**(subs-1) - 1

    # filtered polygonal faces
    poly_sides = np.array([len(p.vertices) for p in me_low.polygons])
    mask = poly_sides == sides
    if bool_material_id:
        mask_material = [1]*nf
        me_low.polygons.foreach_get('material_index',mask_material)
        mask_material = np.array(mask_material) == material_id
        mask = np.logical_and(mask,mask_material)
    if bool_selection:
        mask_selection = [True]*nf
        me_low.polygons.foreach_get('select',mask_selection)
        mask_selection = np.array(mask_selection)
        mask = np.logical_and(mask,mask_selection)
    polys = np.array(me_low.polygons)[mask]
    mult = n0**2 + n0
    ps = poly_sides * mult + 1
    ps = np.insert(ps,0,nv + nevi, axis=0)[:-1]
    ips = ps.cumsum()[mask]     # incremental polygon sides
    nf = len(polys)

    # when subdivided, quad faces follow a different pattern
    if sides == 4:
        n_patches = nf
    else:
        n_patches = nf*sides

    if sides == 4:
        patches = np.zeros((nf,n,n),dtype='int')
        verts = [[vv for vv in p.vertices] for p in polys if len(p.vertices) == sides]
        verts = np.array(verts).reshape((-1,sides))

        # filling corners

        patches[:,0,0] = verts[:,0]
        patches[:,n-1,0] = verts[:,1]
        patches[:,n-1,n-1] = verts[:,2]
        patches[:,0,n-1] = verts[:,3]

        if subs != 0:
            shift_verts = np.roll(verts, -1, axis=1)[:,:,None]
            edge_keys = np.concatenate((shift_verts, verts[:,:,None]), axis=2)
            edge_keys.sort()

            edge_verts = np.array(me_low.edge_keys)     # edges keys
            edges_index = np.zeros((ne,ne),dtype='int')
            edges_index[edge_verts[:,0],edge_verts[:,1]] = np.arange(ne)

            evi = np.arange(nevi) + nv
            evi = evi.reshape(ne,n-2)   # edges inner verts
            straight = np.arange(n-2)+1
            inverted = np.flip(straight)
            inners = np.array([[j*(n-2)+i for j in range(n-2)] for i in range(n-2)])

            ek1 = np.array(me_high.edge_keys)   # edges keys
            ids0 = np.arange(ne)*(n-1)          # edge keys highres
            keys0 = ek1[ids0]                   # first inner edge
            keys1 = ek1[ids0 + n-2]             # last inner edge
            keys = np.concatenate((keys0,keys1))
            pick_verts = np.array((inverted,straight))

            patch_index = np.arange(nf)[:,None,None]

            # edge 0
            e0 = edge_keys[:,0]                     # get edge key (faces, 2)
            edge_id = edges_index[e0[:,0],e0[:,1]]  # edge index
            edge_verts = evi[edge_id]               # indexes of inner vertices
            test = np.concatenate((verts[:,0,None], edge_verts[:,0,None]),axis=1)
            dir = (test[:,None] == keys).all(2).any(1).astype('int8')
            #dir = np.full(verts[:,0].shape, 0, dtype='int8')
            ids = pick_verts[dir][:,None,:]         # indexes order along the side
            patches[patch_index,ids,0] = edge_verts[:,None,:]   # assign indexes
            #patches[:,msk] = inverted # np.flip(patches[msk])

            # edge 1
            e0 = edge_keys[:,1]                     # get edge key (faces, 2)
            edge_id = edges_index[e0[:,0],e0[:,1]]  # edge index
            edge_verts = evi[edge_id]               # indexes of inner vertices
            test = np.concatenate((verts[:,1,None], edge_verts[:,0,None]),axis=1)
            dir = (test[:,None] == keys).all(2).any(1).astype('int8')
            ids = pick_verts[dir][:,:,None]         # indexes order along the side
            patches[patch_index,n-1,ids] = edge_verts[:,:,None] # assign indexes

            # edge 2
            e0 = edge_keys[:,2]                     # get edge key (faces, 2)
            edge_id = edges_index[e0[:,0],e0[:,1]]  # edge index
            edge_verts = evi[edge_id]               # indexes of inner vertices
            test = np.concatenate((verts[:,3,None], edge_verts[:,0,None]),axis=1)
            dir = (test[:,None] == keys).all(2).any(1).astype('int8')
            ids = pick_verts[dir][:,None,:]         # indexes order along the side
            patches[patch_index,ids,n-1] = edge_verts[:,None,:] # assign indexes

            # edge 3
            e0 = edge_keys[:,3]                     # get edge key (faces, 2)
            edge_id = edges_index[e0[:,0],e0[:,1]]  # edge index
            edge_verts = evi[edge_id]               # indexes of inner vertices
            test = np.concatenate((verts[:,0,None], edge_verts[:,0,None]),axis=1)
            dir = (test[:,None] == keys).all(2).any(1).astype('int8')
            ids = pick_verts[dir][:,:,None]         # indexes order along the side
            patches[patch_index,0,ids] = edge_verts[:,:,None]   # assign indexes

            # fill inners
            patches[:,1:-1,1:-1] = inners[None,:,:] + ips[:,None,None]

    #end_time = time.time()
    #print('Tissue: Got Patches in {:.4f} sec'.format(end_time-start_time))

    return patches, mask
def tessellate_prepare_component(ob1, props):
    mode = props['mode']
    bounds_x = props['bounds_x']
    bounds_y = props['bounds_y']
    scale_mode = props['scale_mode']
    normals_mode = props['normals_mode']
    zscale = props['zscale']
    offset = props['offset']
    use_origin_offset = props['use_origin_offset']
    bool_shapekeys = props['bool_shapekeys']

    thres = 0.005

    me1 = ob1.data

    # Component statistics
    n_verts = len(me1.vertices)

    # Component bounding box
    min_c = Vector((0, 0, 0))
    max_c = Vector((0, 0, 0))
    first = True
    for v in me1.vertices:
        vert = v.co
        if vert[0] < min_c[0] or first:
            min_c[0] = vert[0]
        if vert[1] < min_c[1] or first:
            min_c[1] = vert[1]
        if vert[2] < min_c[2] or first:
            min_c[2] = vert[2]
        if vert[0] > max_c[0] or first:
            max_c[0] = vert[0]
        if vert[1] > max_c[1] or first:
            max_c[1] = vert[1]
        if vert[2] > max_c[2] or first:
            max_c[2] = vert[2]
        first = False
    bb = max_c - min_c

    # adaptive XY
    verts1 = []
    for v in me1.vertices:
        if mode == 'BOUNDS':
            vert = v.co - min_c # (ob1.matrix_world * v.co) - min_c
            if use_origin_offset: vert[2] = v.co[2]
            vert[0] = vert[0] / bb[0] if bb[0] != 0 else 0.5
            vert[1] = vert[1] / bb[1] if bb[1] != 0 else 0.5
            if scale_mode == 'CONSTANT' or normals_mode in ('OBJECT', 'SHAPEKEYS'):
                if not use_origin_offset:
                    vert[2] = vert[2] / bb[2] if bb[2] != 0 else 0
                    vert[2] = vert[2] - 0.5 + offset * 0.5
            else:
                if not use_origin_offset:
                    vert[2] = vert[2] + (-0.5 + offset * 0.5) * bb[2]
            vert[2] *= zscale
        elif mode == 'LOCAL':
            vert = v.co.xyz
            vert[2] *= zscale
            #vert[2] = (vert[2] - min_c[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
        elif mode == 'GLOBAL':
            vert = ob1.matrix_world @ v.co
            vert[2] *= zscale
            try:
                for sk in me1.shape_keys.key_blocks:
                    sk.data[v.index].co = ob1.matrix_world @ sk.data[v.index].co
            except: pass
        v.co = vert

    # ShapeKeys
    if bool_shapekeys and ob1.data.shape_keys:
        for sk in ob1.data.shape_keys.key_blocks:
            source = sk.data
            _sk_uv_quads = [0]*len(verts1)
            _sk_uv = [0]*len(verts1)
            for i, sk_v in enumerate(source):
                if mode == 'BOUNDS':
                    sk_vert = sk_v.co - min_c
                    if use_origin_offset: sk_vert[2] = sk_v.co[2]
                    sk_vert[0] = (sk_vert[0] / bb[0] if bb[0] != 0 else 0.5)
                    sk_vert[1] = (sk_vert[1] / bb[1] if bb[1] != 0 else 0.5)
                    if scale_mode == 'CONSTANT' or normals_mode in ('OBJECT', 'SHAPEKEYS'):
                        if not use_origin_offset:
                            sk_vert[2] = (sk_vert[2] / bb[2] if bb[2] != 0 else sk_vert[2])
                            sk_vert[2] = sk_vert[2] - 0.5 + offset * 0.5
                    else:
                        if not use_origin_offset:
                            sk_vert[2] = sk_vert[2] + (- 0.5 + offset * 0.5) * bb[2]
                    sk_vert[2] *= zscale
                elif mode == 'LOCAL':
                    sk_vert = sk_v.co
                    sk_vert[2] *= zscale
                elif mode == 'GLOBAL':
                    sk_vert = sk_v.co
                    sk_vert[2] *= zscale
                sk_v.co = sk_vert

    if mode != 'BOUNDS' and (bounds_x != 'EXTEND' or bounds_y != 'EXTEND'):
        ob1.active_shape_key_index = 0
        bm = bmesh.new()
        bm.from_mesh(me1)
        # Bound X
        planes_co = []
        planes_no = []
        bounds = []
        if bounds_x != 'EXTEND':
            planes_co += [(0,0,0), (1,0,0)]
            planes_no += [(-1,0,0), (1,0,0)]
            bounds += [bounds_x, bounds_x]
        if bounds_y != 'EXTEND':
            planes_co += [(0,0,0), (0,1,0)]
            planes_no += [(0,-1,0), (0,1,0)]
            bounds += [bounds_y, bounds_y]
        for co, norm, bound in zip(planes_co, planes_no, bounds):
            count = 0
            while True:
                moved = 0
                original_edges = list(bm.edges)
                geom = list(bm.verts) + list(bm.edges) + list(bm.faces)
                bisect = bmesh.ops.bisect_plane(bm, geom=geom, dist=0,
                    plane_co=co, plane_no=norm, use_snap_center=False,
                    clear_outer=bound=='CLIP', clear_inner=False
                    )
                geom = bisect['geom']
                cut_edges = [g for g in bisect['geom_cut'] if type(g)==bmesh.types.BMEdge]
                cut_verts = [g for g in bisect['geom_cut'] if type(g)==bmesh.types.BMVert]

                if bound!='CLIP':
                    for e in cut_edges:
                        seam = True
                        # Prevent glitches
                        for e1 in original_edges:
                            match_00 = (e.verts[0].co-e1.verts[0].co).length < thres
                            match_11 = (e.verts[1].co-e1.verts[1].co).length < thres
                            match_01 = (e.verts[0].co-e1.verts[1].co).length < thres
                            match_10 = (e.verts[1].co-e1.verts[0].co).length < thres
                            if (match_00 and match_11) or (match_01 and match_10):
                                seam = False
                                break
                        e.seam = seam

                if bound == 'CYCLIC':
                    geom_verts = []
                    if norm == (-1,0,0):
                        geom_verts = [v for v in bm.verts if v.co.x < 0]
                    if norm == (1,0,0):
                        geom_verts = [v for v in bm.verts if v.co.x > 1]
                    if norm == (0,-1,0):
                        geom_verts = [v for v in bm.verts if v.co.y < 0]
                    if norm == (0,1,0):
                        geom_verts = [v for v in bm.verts if v.co.y > 1]
                    if len(geom_verts) > 0:
                        geom = bmesh.ops.region_extend(bm, geom=geom_verts,
                            use_contract=False, use_faces=False, use_face_step=True
                            )
                        geom = bmesh.ops.split(bm, geom=geom['geom'], use_only_faces=False)
                        vec = Vector(norm)
                        move_verts = [g for g in geom['geom'] if type(g)==bmesh.types.BMVert]
                        bmesh.ops.translate(bm, vec=-vec, verts=move_verts)
                        for key in bm.verts.layers.shape.keys():
                            sk = bm.verts.layers.shape.get(key)
                            for v in move_verts:
                                v[sk] -= vec
                        moved += len(move_verts)
                count += 1
                if moved == 0 or count > 1000: break
        bm.to_mesh(me1)

    com_area = bb[0]*bb[1]
    return ob1, com_area
def get_quads(me, bool_selection):
    nf = len(me.polygons)

    verts = []
    materials = []
    mask = []
    for poly in me.polygons:
        p = list(poly.vertices)
        sides = len(p)
        if sides == 3:
            verts.append([[p[0], p[-1]], [p[1], p[2]]])
            materials.append(poly.material_index)
            mask.append(poly.select if bool_selection else True)
        elif sides == 4:
            verts.append([[p[0], p[3]], [p[1], p[2]]])
            materials.append(poly.material_index)
            mask.append(poly.select if bool_selection else True)
        else:
            while True:
                new_poly = [[p[-2], p.pop(-1)], [p[1], p.pop(0)]]
                verts.append(new_poly)
                materials.append(poly.material_index)
                mask.append(poly.select if bool_selection else True)
                if len(p) < 3: break
    mask = np.array(mask)
    materials = np.array(materials)[mask]
    verts = np.array(verts)[mask]
    return verts, mask, materials
def get_patches(me_low, me_high, sides, subs, bool_selection): #, bool_material_id, material_id):
    nv = len(me_low.vertices)   # number of vertices
    ne = len(me_low.edges)      # number of edges
    nf = len(me_low.polygons)   # number of polygons
    n = 2**subs + 1
    nev = ne * n                # number of vertices along the subdivided edges
    nevi = nev - 2*ne           # internal vertices along subdivided edges

    n0 = 2**(subs-1) - 1

    # filtered polygonal faces
    poly_sides = [0]*nf
    me_low.polygons.foreach_get('loop_total',poly_sides)
    poly_sides = np.array(poly_sides)
    mask = poly_sides == sides

    if bool_selection:
        mask_selection = [True]*nf
        me_low.polygons.foreach_get('select',mask_selection)
        # NOTE: this replaces (rather than intersects) the sides mask computed above
        mask = np.array(mask_selection)

    materials = [1]*nf
    me_low.polygons.foreach_get('material_index',materials)
    materials = np.array(materials)[mask]

    polys = np.array(me_low.polygons)[mask]
    mult = n0**2 + n0
    ps = poly_sides * mult + 1
    ps = np.insert(ps,0,nv + nevi, axis=0)[:-1]
    ips = ps.cumsum()[mask]     # incremental polygon sides
    nf = len(polys)

    # when subdivided, quad faces follow a different pattern
    if sides == 4:
        n_patches = nf
    else:
        n_patches = nf*sides

    if sides == 4:
        patches = np.empty((nf,n,n),dtype='int')
        verts = [list(p.vertices) for p in polys if len(p.vertices) == sides]
        verts = np.array(verts).reshape((-1,sides))

        # filling corners

        patches[:,0,0] = verts[:,0]
        patches[:,n-1,0] = verts[:,1]
        patches[:,n-1,n-1] = verts[:,2]
        patches[:,0,n-1] = verts[:,3]

        if subs != 0:
            shift_verts = np.roll(verts, -1, axis=1)[:,:,None]
            edge_keys = np.concatenate((shift_verts, verts[:,:,None]), axis=2)
            edge_keys.sort()

            edge_verts = np.array(me_low.edge_keys)     # edges keys
            edges_index = np.empty((ne,ne),dtype='int')
            edges_index[edge_verts[:,0],edge_verts[:,1]] = np.arange(ne)

            evi = np.arange(nevi) + nv
            evi = evi.reshape(ne,n-2)   # edges inner verts
            straight = np.arange(n-2)+1
            inverted = np.flip(straight)
            inners = np.array([[j*(n-2)+i for j in range(n-2)] for i in range(n-2)])

            ek1 = me_high.edge_keys                 # edges keys
            ek1 = np.array(ek1)                     # edge keys highres
            keys0 = ek1[np.arange(ne)*(n-1)]        # first inner edge
            keys1 = ek1[np.arange(ne)*(n-1)+n-2]    # last inner edge
            edges_dir = np.zeros((nev,nev),dtype='bool')    # Better memory usage
            #edges_dir = np.zeros((nev,nev),dtype='int8')   ### Memory usage not efficient, dictionary as alternative?
            edges_dir[keys0[:,0], keys0[:,1]] = 1
            edges_dir[keys1[:,0], keys1[:,1]] = 1
            pick_verts = np.array((inverted,straight))

            patch_index = np.arange(nf)[:,None,None]

            # edge 0
            e0 = edge_keys[:,0]                             # get edge key (faces, 2)
            edge_id = edges_index[e0[:,0],e0[:,1]]          # edge index
            edge_verts = evi[edge_id]                       # indexes of inner vertices
            dir = edges_dir[verts[:,0], edge_verts[:,0]]    # check correct direction
            ids = pick_verts[dir.astype('int8')][:,None,:]  # indexes order along the side
            patches[patch_index,ids,0] = edge_verts[:,None,:]   # assign indexes

            # edge 1
            e0 = edge_keys[:,1]                             # get edge key (faces, 2)
            edge_id = edges_index[e0[:,0],e0[:,1]]          # edge index
            edge_verts = evi[edge_id]                       # indexes of inner vertices
            dir = edges_dir[verts[:,1], edge_verts[:,0]]    # check correct direction
            ids = pick_verts[dir.astype('int8')][:,:,None]  # indexes order along the side
            patches[patch_index,n-1,ids] = edge_verts[:,:,None] # assign indexes

            # edge 2
            e0 = edge_keys[:,2]                             # get edge key (faces, 2)
            edge_id = edges_index[e0[:,0],e0[:,1]]          # edge index
            edge_verts = evi[edge_id]                       # indexes of inner vertices
            dir = edges_dir[verts[:,3], edge_verts[:,0]]    # check correct direction
            ids = pick_verts[dir.astype('int8')][:,None,:]  # indexes order along the side
            patches[patch_index,ids,n-1] = edge_verts[:,None,:] # assign indexes

            # edge 3
            e0 = edge_keys[:,3]                             # get edge key (faces, 2)
            edge_id = edges_index[e0[:,0],e0[:,1]]          # edge index
            edge_verts = evi[edge_id]                       # indexes of inner vertices
            dir = edges_dir[verts[:,0], edge_verts[:,0]]    # check correct direction
            ids = pick_verts[dir.astype('int8')][:,:,None]  # indexes order along the side
            patches[patch_index,0,ids] = edge_verts[:,:,None]   # assign indexes

            # fill inners
            patches[:,1:-1,1:-1] = inners[None,:,:] + ips[:,None,None]

    return patches, mask, materials
def get_vertices_numpy(mesh):
    """
    Create a numpy array with the vertices of a given mesh
    """
    n_verts = len(mesh.vertices)
    verts = [0]*n_verts*3
    mesh.vertices.foreach_get('co', verts)
    verts = np.array(verts).reshape((n_verts,3))
    return verts

def get_vertices_and_normals_numpy(mesh):
    """
    Create two numpy arrays with the vertices and the normals of a given mesh
    """
    n_verts = len(mesh.vertices)
    verts = [0]*n_verts*3
    normals = [0]*n_verts*3
    mesh.vertices.foreach_get('co', verts)
    mesh.vertices.foreach_get('normal', normals)
    verts = np.array(verts).reshape((n_verts,3))
    normals = np.array(normals).reshape((n_verts,3))
    return verts, normals

def get_normals_numpy(mesh):
    """
    Create a numpy array with the normals of a given mesh
    """
    n_verts = len(mesh.vertices)
    normals = [0]*n_verts*3
    mesh.vertices.foreach_get('normal', normals)
    normals = np.array(normals).reshape((n_verts,3))
    return normals

def get_edges_numpy(mesh):
    """
    Create a numpy array with the edges of a given mesh
    """
    n_edges = len(mesh.edges)
    edges = [0]*n_edges*2
    mesh.edges.foreach_get('vertices', edges)
    edges = np.array(edges).reshape((n_edges,2)).astype('int')
    return edges
def get_edges_id_numpy(mesh):
    n_edges = len(mesh.edges)
    edges = [0]*n_edges*2
    mesh.edges.foreach_get('vertices', edges)
    edges = np.array(edges).reshape((n_edges,2))
    indexes = np.arange(n_edges).reshape((n_edges,1))
    edges = np.concatenate((edges,indexes), axis=1)
    return edges
def get_polygons_select_numpy(mesh):
    n_polys = len(mesh.polygons)
    # one value per polygon (the original buffer was oversized by a factor of 2)
    selections = [0]*n_polys
    mesh.polygons.foreach_get('select', selections)
    selections = np.array(selections)
    return selections
def get_attribute_numpy(elements_list, attribute='select', mult=1):
    """
    Generate a numpy array getting an attribute from a list of elements using
    the foreach_get() function.
    """
    n_elements = len(elements_list)
    values = [0]*n_elements*mult
    elements_list.foreach_get(attribute, values)
    values = np.array(values)
    if mult > 1: values = values.reshape((n_elements,mult))
    return values
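# Example (illustrative): read all vertex coordinates of a mesh `me` as an
# (n_verts, 3) array.
#   co = get_attribute_numpy(me.vertices, attribute='co', mult=3)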
def get_vertices(mesh):
    n_verts = len(mesh.vertices)
    verts = [0]*n_verts*3
    mesh.vertices.foreach_get('co', verts)
    verts = np.array(verts).reshape((n_verts,3))
    verts = [Vector(v) for v in verts]
    return verts

def get_faces(mesh):
    faces = [[v for v in f.vertices] for f in mesh.polygons]
    return faces

def get_faces_numpy(mesh):
    faces = [[v for v in f.vertices] for f in mesh.polygons]
    return np.array(faces)
def get_faces_edges_numpy(mesh):
    faces = [f.edge_keys for f in mesh.polygons]
    return np.array(faces)
def find_curves(edges, n_verts):
    verts_dict = {key:[] for key in range(n_verts)}
    for e in edges:
        verts_dict[e[0]].append(e[1])
        verts_dict[e[1]].append(e[0])
    curves = []
    while True:
        if len(verts_dict) == 0: break
        # next starting point
        v = list(verts_dict.keys())[0]
        # neighbors
        v01 = verts_dict[v]
        if len(v01) == 0:
            verts_dict.pop(v)
            continue
        curve = []
        if len(v01) > 1: curve.append(v01[1])   # add neighbors
        curve.append(v)         # add starting point
        curve.append(v01[0])    # add neighbors
        verts_dict.pop(v)
        # start building curve
        while True:
            #last_point = curve[-1]
            #if last_point not in verts_dict: break

            # try to change direction if needed
            if curve[-1] in verts_dict: pass
            elif curve[0] in verts_dict: curve.reverse()
            else: break

            # neighbors points
            last_point = curve[-1]
            v01 = verts_dict[last_point]

            # curve end
            if len(v01) == 1:
                verts_dict.pop(last_point)
                if curve[0] in verts_dict: continue
                else: break

            # choose next point
            new_point = None
            if v01[0] == curve[-2]: new_point = v01[1]
            elif v01[1] == curve[-2]: new_point = v01[0]
            #else: break

            #if new_point != curve[1]:
            curve.append(new_point)
            verts_dict.pop(last_point)
            if curve[0] == curve[-1]:
                verts_dict.pop(new_point)
                break
        curves.append(curve)
    return curves
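# Small worked example for find_curves (hypothetical indices):
#   find_curves([(0, 1), (1, 2), (2, 3)], 4) -> [[0, 1, 2, 3]]
# i.e. three chained edges over four vertices are returned as a single open curve.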
def curve_from_points(points, name='Curve'):
    curve = bpy.data.curves.new(name,'CURVE')
    for c in points:
        s = curve.splines.new('POLY')
        s.points.add(len(c))
        for i,p in enumerate(c): s.points[i].co = p.xyz + [1]
    ob_curve = bpy.data.objects.new(name,curve)
    return ob_curve

def curve_from_pydata(points, radii, indexes, name='Curve', skip_open=False, merge_distance=1, set_active=True, only_data=False):
    curve = bpy.data.curves.new(name,'CURVE')
    curve.dimensions = '3D'
    use_rad = True
    for c in indexes:
        bool_cyclic = c[0] == c[-1]
        if bool_cyclic: c.pop(-1)
        # cleanup
        pts = np.array([points[i] for i in c])
        try:
            rad = np.array([radii[i] for i in c])
        except:
            use_rad = False
            rad = 1
        if merge_distance > 0:
            pts1 = np.roll(pts,1,axis=0)
            dist = np.linalg.norm(pts1-pts, axis=1)
            count = 0
            n = len(dist)
            mask = np.ones(n).astype('bool')
            for i in range(n):
                count += dist[i]
                if count > merge_distance: count = 0
                else: mask[i] = False
            pts = pts[mask]
            if use_rad: rad = rad[mask]

        if skip_open and not bool_cyclic: continue
        s = curve.splines.new('POLY')
        n_pts = len(pts)
        s.points.add(n_pts-1)
        w = np.ones(n_pts).reshape((n_pts,1))
        co = np.concatenate((pts,w),axis=1).reshape((n_pts*4))
        s.points.foreach_set('co',co)
        if use_rad: s.points.foreach_set('radius',rad)
        s.use_cyclic_u = bool_cyclic
    if only_data:
        return curve
    else:
        ob_curve = bpy.data.objects.new(name,curve)
        bpy.context.collection.objects.link(ob_curve)
        if set_active:
            bpy.context.view_layer.objects.active = ob_curve
        return ob_curve
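# Illustrative call (point and radius values are made up; merge_distance=0
# disables the merging pass):
#   pts = [(0, 0, 0), (1, 0, 0), (1, 1, 0)]
#   ob = curve_from_pydata(pts, [0.1, 0.1, 0.1], [[0, 1, 2]],
#                          name='Profile', merge_distance=0)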
def update_curve_from_pydata(curve, points, normals, radii, indexes, merge_distance=1, pattern=[1,0], depth=0.1, offset=0):
    curve.splines.clear()
    use_rad = True
    for ic, c in enumerate(indexes):
        bool_cyclic = c[0] == c[-1]
        if bool_cyclic: c.pop(-1)

        # cleanup
        pts = np.array([points[i] for i in c if i != None])
        nor = np.array([normals[i] for i in c if i != None])
        try:
            rad = np.array([radii[i] for i in c if i != None])
        except:
            use_rad = False
            rad = 1
        if merge_distance > 0:
            pts1 = np.roll(pts,1,axis=0)
            dist = np.linalg.norm(pts1-pts, axis=1)
            count = 0
            n = len(dist)
            mask = np.ones(n).astype('bool')
            for i in range(n):
                count += dist[i]
                if count > merge_distance: count = 0
                else: mask[i] = False
            pts = pts[mask]
            nor = nor[mask]
            if use_rad: rad = rad[mask]
        #if skip_open and not bool_cyclic: continue
        n_pts = len(pts)
        series = np.arange(n_pts)
        patt1 = series + (series-series%pattern[1])/pattern[1]*pattern[0]+pattern[0]
        patt1 = patt1[patt1<n_pts].astype('int')
        patt0 = series + (series-series%pattern[0])/pattern[0]*pattern[1]
        patt0 = patt0[patt0<n_pts].astype('int')
        nor[patt0] *= 0.5*depth*(1 + offset)
        nor[patt1] *= 0.5*depth*(-1 + offset)
        if pattern[0]*pattern[1] != 0: pts += nor
        s = curve.splines.new('POLY')
        s.points.add(n_pts-1)
        w = np.ones(n_pts).reshape((n_pts,1))
        co = np.concatenate((pts,w),axis=1).reshape((n_pts*4))
        s.points.foreach_set('co',co)
        if use_rad: s.points.foreach_set('radius',rad)
        s.use_cyclic_u = bool_cyclic
def loops_from_bmesh(edges):
    """
    Return one or more loops given some starting edges.
    :arg edges: Edges used as seeds.
    :type edges: List of :class:'bmesh.types.BMEdge'
    :return: Elements in each loop (Verts, Edges), where:
        - Verts - List of Lists of :class:'bmesh.types.BMVert'
        - Edges - List of Lists of :class:'bmesh.types.BMEdge'
    :rtype: tuple
    """
    todo_edges = list(edges)
    #todo_edges = [e.index for e in bm.edges]
    vert_loops = []
    edge_loops = []
    while len(todo_edges) > 0:
        edge = todo_edges[0]
        vert_loop, edge_loop = run_edge_loop(edge)
        for e in edge_loop:
            try: todo_edges.remove(e)
            except: pass
        edge_loops.append(edge_loop)
        vert_loops.append(vert_loop)
        #if len(todo_edges) == 0: break
    return vert_loops, edge_loops
def run_edge_loop_direction(edge,vert):
    """
    Return vertices and edges along a loop in a specific direction.
    :arg edge: Edge used as seed.
    :type edge: :class:'bmesh.types.BMEdge'
    :arg vert: Vertex of the Edge used for the direction.
    :type vert: :class:'bmesh.types.BMVert'
    :return: Elements in the loop (Verts, Edges), where:
        - Verts - List of :class:'bmesh.types.BMVert'
        - Edges - List of :class:'bmesh.types.BMEdge'
    :rtype: tuple
    """
    edge0 = edge
    edge_loop = [edge]
    vert_loop = [vert]
    while True:
        link_edges = list(vert.link_edges)
        link_edges.remove(edge)
        n_edges = len(link_edges)
        if n_edges == 1:
            edge = link_edges[0]
        elif n_edges < 4:
            link_faces = edge.link_faces
            if len(link_faces) == 0: break
            edge = None
            for e in link_edges:
                link_faces1 = e.link_faces
                if len(link_faces) == len(link_faces1):
                    common_faces = [f for f in link_faces1 if f in link_faces]
                    if len(common_faces) == 0:
                        edge = e
                        break
        else: break
        if edge == None: break
        edge_loop.append(edge)
        vert = edge.other_vert(vert)
        vert_loop.append(vert)
        if edge == edge0: break
    return vert_loop, edge_loop

def run_edge_loop(edge):
    """
    Return vertices and edges along a loop in both directions.
    :arg edge: Edge used as seed.
    :type edge: :class:'bmesh.types.BMEdge'
    :return: Elements in the loop (Verts, Edges), where:
        - Verts - List of :class:'bmesh.types.BMVert'
        - Edges - List of :class:'bmesh.types.BMEdge'
    :rtype: tuple
    """
    vert0 = edge.verts[0]
    vert_loop0, edge_loop0 = run_edge_loop_direction(edge, vert0)
    if len(edge_loop0) == 1 or edge_loop0[0] != edge_loop0[-1]:
        vert1 = edge.verts[1]
        vert_loop1, edge_loop1 = run_edge_loop_direction(edge, vert1)
        edge_loop0.reverse()
        vert_loop0.reverse()
        edge_loop = edge_loop0[:-1] + edge_loop1
        vert_loop = vert_loop0 + vert_loop1
    else:
        edge_loop = edge_loop0[1:]
        vert_loop = vert_loop0
    return vert_loop, edge_loop
def curve_from_vertices(indexes, verts, name='Curve'):
    """
    Curve data from given vertices.
    :arg indexes: List of Lists of indexes of the vertices.
    :type indexes: List of Lists of int
    :arg verts: List of vertices.
    :type verts: List of :class:'bpy.types.MeshVertex'
    :arg name: Name of the Curve data.
    :type name: str
    :return: Generated Curve data
    :rtype: :class:'bpy.types.Curve'
    """
    curve = bpy.data.curves.new(name,'CURVE')
    for c in indexes:
        s = curve.splines.new('POLY')
        s.points.add(len(c))
        for i,p in enumerate(c):
            s.points[i].co = verts[p].co.xyz + [1]
            #s.points[i].tilt = degrees(asin(verts[p].co.z))
    ob_curve = bpy.data.objects.new(name,curve)
    return ob_curve

def nurbs_from_vertices(indexes, co, radii=[], name='Curve', set_active=True, interpolation='POLY'):
    curve = bpy.data.curves.new(name,'CURVE')
    curve.dimensions = '3D'
    curve.resolution_u = 2
    curve.bevel_depth = 0.01
    curve.bevel_resolution = 0
    for pts in indexes:
        s = curve.splines.new(interpolation)
        n_pts = len(pts)
        s.points.add(n_pts-1)
        w = np.ones(n_pts).reshape((n_pts,1))
        curve_co = np.concatenate((co[pts],w),axis=1).reshape((n_pts*4))
        s.points.foreach_set('co',curve_co)
        try:
            s.points.foreach_set('radius',radii[pts])
        except: pass
        s.use_endpoint_u = True

    ob_curve = bpy.data.objects.new(name,curve)
    bpy.context.collection.objects.link(ob_curve)
    if set_active:
        bpy.context.view_layer.objects.active = ob_curve
        ob_curve.select_set(True)
    return ob_curve
# ------------------------------------------------------------------
# VERTEX GROUPS AND WEIGHT
# ------------------------------------------------------------------

def get_weight(vertex_group, n_verts):
    """
    Read weight values from given Vertex Group.
    :arg vertex_group: Vertex Group.
    :type vertex_group: :class:'bpy.types.VertexGroup'
    :arg n_verts: Number of Vertices (output list size).
    :type n_verts: int
    :return: Read weight values.
    :rtype: list
    """
    weight = [0]*n_verts
    for i in range(n_verts):
        try: weight[i] = vertex_group.weight(i)
        except: pass
    return weight
def get_weight_numpy(vertex_group, n_verts):
    """
    Read weight values from given Vertex Group.
    :arg vertex_group: Vertex Group.
    :type vertex_group: :class:'bpy.types.VertexGroup'
    :arg n_verts: Number of Vertices (output list size).
    :type n_verts: int
    :return: Read weight values as numpy array.
    :rtype: :class:'numpy.ndarray'
    """
    weight = [0]*n_verts
    for i in range(n_verts):
        try: weight[i] = vertex_group.weight(i)
        except: pass
    return np.array(weight)
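# Example (illustrative): read the weights of an existing group into NumPy.
# Vertices not assigned to the group keep weight 0.
#   vg = ob.vertex_groups['Group']   # hypothetical group name
#   w = get_weight_numpy(vg, len(ob.data.vertices))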
def bmesh_get_weight_numpy(group_index, layer, verts):
    weight = np.zeros(len(verts))
    for i, v in enumerate(verts):
        dvert = v[layer]
        if group_index in dvert:
            weight[i] = dvert[group_index]
            #dvert[group_index] = 0.5
    return weight
def bmesh_set_weight_numpy(group_index, layer, verts, weight):
    for i, v in enumerate(verts):
        dvert = v[layer]
        if group_index in dvert:
            dvert[group_index] = weight[i]
    return verts

# NOTE: this second definition overrides bmesh_set_weight_numpy() above; only
# the (bm, group_index, weight) version is visible to callers of this module.
def bmesh_set_weight_numpy(bm, group_index, weight):
    layer = bm.verts.layers.deform.verify()
    for i, v in enumerate(bm.verts):
        dvert = v[layer]
        #if group_index in dvert:
        dvert[group_index] = weight[i]
    return bm

def set_weight_numpy(vg, weight):
    for i, w in enumerate(weight):
        vg.add([i], w, 'REPLACE')
    return vg
def uv_from_bmesh(bm, uv_index=None):
    if uv_index:
        uv_lay = bm.loops.layers.uv[uv_index]
    else:
        uv_lay = bm.loops.layers.uv.active
    uv_co = [0]*len(bm.verts)

    for face in bm.faces:
        for vert,loop in zip(face.verts, face.loops):
            uv_co[vert.index] = loop[uv_lay].uv
    return uv_co

def get_uv_edge_vectors(me, uv_map = 0, only_positive=False):
    count = 0
    uv_vectors = {}
    for i, f in enumerate(me.polygons):
        f_verts = len(f.vertices)
        for j0 in range(f_verts):
            j1 = (j0+1)%f_verts
            uv0 = me.uv_layers[uv_map].data[count+j0].uv
            uv1 = me.uv_layers[uv_map].data[count+j1].uv
            delta_uv = (uv1-uv0).normalized()
            if only_positive:
                delta_uv.x = abs(delta_uv.x)
                delta_uv.y = abs(delta_uv.y)
            edge_key = tuple(sorted([f.vertices[j0], f.vertices[j1]]))
            uv_vectors[edge_key] = delta_uv
        count += f_verts
    uv_vectors = [uv_vectors[tuple(sorted(e.vertices))] for e in me.edges]
    return uv_vectors
def mesh_diffusion(me, values, iter, diff=0.2, uv_dir=0):
    values = np.array(values)
    n_verts = len(me.vertices)

    n_edges = len(me.edges)
    edge_verts = [0]*n_edges*2
    #me.edges.foreach_get("vertices", edge_verts)

    count = 0
    edge_verts = []
    uv_factor = {}
    uv_ang = (0.5 + uv_dir*0.5)*pi/2
    uv_vec = Vector((cos(uv_ang), sin(uv_ang)))
    for i, f in enumerate(me.polygons):
        f_verts = len(f.vertices)
        for j0 in range(f_verts):
            j1 = (j0+1)%f_verts
            if uv_dir != 0:
                uv0 = me.uv_layers[0].data[count+j0].uv
                uv1 = me.uv_layers[0].data[count+j1].uv
                delta_uv = (uv1-uv0).normalized()
                delta_uv.x = abs(delta_uv.x)
                delta_uv.y = abs(delta_uv.y)
                dir = uv_vec.dot(delta_uv)
            else:
                dir = 1
            #dir = abs(dir)
            #uv_factor.append(dir)
            edge_key = [f.vertices[j0], f.vertices[j1]]
            edge_key.sort()
            uv_factor[tuple(edge_key)] = dir
        count += f_verts
    id0 = []
    id1 = []
    uv_mult = []
    for ek, val in uv_factor.items():
        id0.append(ek[0])
        id1.append(ek[1])
        uv_mult.append(val)
    id0 = np.array(id0)
    id1 = np.array(id1)
    uv_mult = np.array(uv_mult)

    #edge_verts = np.array(edge_verts)
    #arr = np.arange(n_edges)*2

    #id0 = edge_verts[arr]     # first vertex indices for each edge
    #id1 = edge_verts[arr+1]   # second vertex indices for each edge
    for ii in range(iter):
        lap = np.zeros(n_verts)
        if uv_dir != 0:
            lap0 = (values[id1] - values[id0])*uv_mult  # laplacian increment for first vertex of each edge
        else:
            lap0 = (values[id1] - values[id0])
        np.add.at(lap, id0, lap0)
        np.add.at(lap, id1, -lap0)
        values += diff*lap
    return values
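# The update rule implemented above is a Laplacian smoothing step: for every
# face edge (i, j), values[i] moves by diff*(values[j] - values[i]) and vice
# versa. With two values 0 and 1 on a shared face edge and diff=0.2, one
# iteration gives 0.2 and 0.8. Note that edges are collected from face loops,
# so wire edges (edges without faces) do not diffuse.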
def mesh_diffusion_vector(me, vectors, iter, diff, uv_dir=0):
    vectors = np.array(vectors)
    x = vectors[:,0]
    y = vectors[:,1]
    z = vectors[:,2]
    x = mesh_diffusion(me, x, iter, diff, uv_dir)
    y = mesh_diffusion(me, y, iter, diff, uv_dir)
    z = mesh_diffusion(me, z, iter, diff, uv_dir)
    vectors[:,0] = x
    vectors[:,1] = y
    vectors[:,2] = z
    return vectors

# ------------------------------------------------------------------
# MODIFIERS
# ------------------------------------------------------------------
def mod_preserve_topology(mod):
    same_topology_modifiers = ('DATA_TRANSFER','NORMAL_EDIT','WEIGHTED_NORMAL',
        'UV_PROJECT','UV_WARP','VERTEX_WEIGHT_EDIT','VERTEX_WEIGHT_MIX',
        'VERTEX_WEIGHT_PROXIMITY','ARMATURE','CAST','CURVE','DISPLACE','HOOK',
        'LAPLACIANDEFORM','LATTICE','MESH_DEFORM','SHRINKWRAP','SIMPLE_DEFORM',
        'SMOOTH','CORRECTIVE_SMOOTH','LAPLACIANSMOOTH','SURFACE_DEFORM','WARP',
        'WAVE','CLOTH','COLLISION','DYNAMIC_PAINT','SOFT_BODY'
        )
    return mod.type in same_topology_modifiers

def mod_preserve_shape(mod):
    same_shape_modifiers = ('DATA_TRANSFER','NORMAL_EDIT','WEIGHTED_NORMAL',
        'UV_PROJECT','UV_WARP','VERTEX_WEIGHT_EDIT','VERTEX_WEIGHT_MIX',
        'VERTEX_WEIGHT_PROXIMITY','DYNAMIC_PAINT'
        )
    return mod.type in same_shape_modifiers
def recurLayerCollection(layerColl, collName):
    """
    Recursively traverse layer_collection for a particular name.
    """
    found = None
    if (layerColl.name == collName):
        return layerColl
    for layer in layerColl.children:
        found = recurLayerCollection(layer, collName)
        if found:
            return found

def auto_layer_collection():
    """
    Automatically change active layer collection.
    """
    layer = bpy.context.view_layer.active_layer_collection
    layer_collection = bpy.context.view_layer.layer_collection
    if layer.hide_viewport or layer.collection.hide_viewport:
        collections = bpy.context.object.users_collection
        for c in collections:
            lc = recurLayerCollection(layer_collection, c.name)
            if not c.hide_viewport and not lc.hide_viewport:
                bpy.context.view_layer.active_layer_collection = lc