# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

from math import sqrt, radians, floor, ceil
import bpy
import time
from mathutils import Vector, Matrix


# A Python implementation of n-sized vectors.
# Mathutils vectors have a max size of 4, and we need at least 5 for
# Simplify Curves and even more for Cross Correlation.
# Vector utility functions
class NdVector:
    vec = []

    def __init__(self, vec):
        self.vec = vec[:]

    def __len__(self):
        return len(self.vec)

    def __mul__(self, otherMember):
        # assume anything with list access is a vector
        if isinstance(otherMember, NdVector):
            a = self.vec
            b = otherMember.vec
            n = len(self)
            return sum([a[i] * b[i] for i in range(n)])
        else:
            # int/float
            return NdVector([otherMember * x for x in self.vec])

    def __sub__(self, otherVec):
        a = self.vec
        b = otherVec.vec
        n = len(self)
        return NdVector([a[i] - b[i] for i in range(n)])

    def __add__(self, otherVec):
        a = self.vec
        b = otherVec.vec
        n = len(self)
        return NdVector([a[i] + b[i] for i in range(n)])

    def __truediv__(self, scalar):
        return NdVector([x / scalar for x in self.vec])

    @property
    def length(self):
        return sqrt(self * self)

    @property
    def lengthSq(self):
        return (self * self)

    def normalize(self):
        length = self.length
        self.vec = [x / length for x in self.vec]

    def copy(self):
        return NdVector(self.vec)

    def __getitem__(self, i):
        return self.vec[i]

    @property
    def x(self):
        return self.vec[0]

    @property
    def y(self):
        return self.vec[1]

    def resize_2d(self):
        return Vector((self.x, self.y))
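

# A minimal usage sketch of NdVector (illustrative only, not used by the
# addon at import time): the * operator doubles as the dot product, and
# length is derived from it.
#
# >>> a = NdVector((3, 4, 0, 0, 0))
# >>> b = NdVector((1, 0, 0, 0, 0))
# >>> a * b        # dot product
# 3
# >>> a.length     # sqrt(a * a)
# 5.0
# >>> (a - b).x
# 2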


# Sampled data point class for Simplify Curves
class dataPoint:
    index = 0
    # x, y1, y2, y3 coordinates of the original point
    co = NdVector((0, 0, 0, 0, 0))
    # position according to the parametric view of the original data, [0, 1] range
    u = 0
    # use this for anything
    temp = 0

    def __init__(self, index, co, u=0):
        self.index = index
        self.co = co
        self.u = u


# Helper to convert sampled fcurves back into editable (keyframed) ones.
def make_editable_fcurves(fcurves):
    for fc in fcurves:
        if fc.sampled_points:
            fc.convert_to_keyframes(floor(fc.sampled_points[0].co[0]), ceil(fc.sampled_points[-1].co[0]) + 1)


# Cross Correlation Function
# http://en.wikipedia.org/wiki/Cross_correlation
# IN: curvesA, curvesB - bpy_collection/list of fcurves to analyze. Auto-correlation is when they are the same.
#     margin - when searching for the best "start" frame, how large a neighborhood of frames we inspect (similar to epsilon in calculus)
# OUT: startFrame, length of new anim, and curvesA
def crossCorrelationMatch(curvesA, curvesB, margin):
    dataA = []
    dataB = []
    start = int(max(curvesA[0].range()[0], curvesB[0].range()[0]))
    end = int(min(curvesA[0].range()[1], curvesB[0].range()[1]))

    # transfer all fcurves data on each frame to a single NdVector.
    for i in range(1, end):
        vec = []
        for fcurve in curvesA:
            if fcurve.data_path in [otherFcurve.data_path for otherFcurve in curvesB]:
                vec.append(fcurve.evaluate(i))
        dataA.append(NdVector(vec))
        vec = []
        for fcurve in curvesB:
            if fcurve.data_path in [otherFcurve.data_path for otherFcurve in curvesA]:
                vec.append(fcurve.evaluate(i))
        dataB.append(NdVector(vec))

    # Comparator for cross correlation. The "classic" implementation uses the dot product, as do we.
    def comp(a, b):
        return a * b

    # Create Rxy, which holds the cross correlation data.
    N = len(dataA)
    Rxy = [0.0] * N
    for i in range(N):
        for j in range(i, min(i + N, N)):
            Rxy[i] += comp(dataA[j], dataB[j - i])
        for j in range(i):
            Rxy[i] += comp(dataA[j], dataB[j - i + N])
        Rxy[i] /= float(N)

    # Find the local maxima in the cross correlation data via the numerical derivative.
    def LocalMaximums(Rxy):
        Rxyd = [Rxy[i] - Rxy[i - 1] for i in range(1, len(Rxy))]
        maxs = []
        for i in range(1, len(Rxyd) - 1):
            a = Rxyd[i - 1]
            b = Rxyd[i]
            # sign change (zero crossing) at point i, denoting an extremum
            if (a >= 0 and b < 0) or (a < 0 and b >= 0):
                maxs.append((i, max(Rxy[i], Rxy[i - 1])))
        return [x[0] for x in maxs]
        #~ return max(maxs, key=lambda x: x[1])[0]

    # flms - the possible offsets of the first part of the animation. In auto-correlation, this is the length of the loop.
    flms = LocalMaximums(Rxy[0:int(len(Rxy))])
    ss = []

    # for every local maximum, find the best one - i.e. the one that also has the best start frame.
    for flm in flms:
        diff = []

        for i in range(len(dataA) - flm):
            diff.append((dataA[i] - dataB[i + flm]).lengthSq)

        def lowerErrorSlice(diff, e):
            # (index, error at index, offset); the offset is included so a failed search still returns a 3-tuple
            bestSlice = (0, 100000, flm)
            for i in range(e, len(diff) - e):
                errorSlice = sum(diff[i - e:i + e + 1])
                if errorSlice < bestSlice[1]:
                    bestSlice = (i, errorSlice, flm)
            return bestSlice

        s = lowerErrorSlice(diff, margin)
        ss.append(s)

    # Find the best result and return it.
    ss.sort(key=lambda x: x[1])
    return ss[0][2], ss[0][0], dataA
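

# Hedged example (not part of the original addon): a minimal driver showing
# how crossCorrelationMatch is typically used for auto-correlation. Assumes
# the active object has animation_data.action with fcurves; call it manually,
# e.g. from Blender's Python console.
def example_cross_correlation(margin=10):
    curves = bpy.context.active_object.animation_data.action.fcurves
    # comparing a curve set against itself yields the length of its loop
    flm, s, _data = crossCorrelationMatch(curves, curves, margin)
    print("best loop length:", flm, "best start frame:", s)
    return flm, s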


# Uses auto correlation (cross correlation of a set of curves against itself) and trims the active_object's fcurves,
# except for location curves (which in mocap tend to be non-cyclic, e.g. a walk cycle moving forward).
# Transfers the fcurve data to a list of NdVector (length of list is number of fcurves), and calls the cross correlation function.
# Then trims the fcurves accordingly.
# IN: Nothing; set the object you want as active and call. Assumes the object has animation_data.action!
# OUT: Trims the object's fcurves (except location curves).
def autoloop_anim():
    context = bpy.context
    obj = context.active_object

    def locCurve(x):
        return x.data_path == "location"

    fcurves = [x for x in obj.animation_data.action.fcurves if not locCurve(x)]

    margin = 10

    flm, s, data = crossCorrelationMatch(fcurves, fcurves, margin)
    loop = data[s:s + flm]

    # blend with a square-root falloff over the seam's neighborhood to ensure good tiling.
    for i in range(1, margin + 1):
        w1 = sqrt(float(i) / margin)
        loop[-i] = (loop[-i] * w1) + (loop[0] * (1 - w1))

    for curve in fcurves:
        pts = curve.keyframe_points
        for i in range(len(pts) - 1, -1, -1):
            pts.remove(pts[i])

    for c, curve in enumerate(fcurves):
        pts = curve.keyframe_points
        for i in range(len(loop)):
            pts.insert(i + 2, loop[i][c])

    context.scene.frame_end = flm


# simplifyCurves: performs the bulk of the samples-to-bezier conversion.
# IN: curveGroup - which can be a collection of single fcurves, or grouped (via nested lists).
#     error - threshold of permissible error (max distance) of the new beziers to the original data
#     reparaError - threshold of error below which we try to fix the parameterization rather than split the existing
#                   curve; > error, usually by a small constant factor for best performance.
#     maxIterations - maximum number of reparameterization iterations we should attempt
#                     (Newton-Raphson is not guaranteed to converge, so this is needed).
#     group_mode - boolean, indicating whether we should place bezier keyframes on the same x (frame),
#                  or optimize each individual curve.
# OUT: None. Deletes the existing curves and creates the new beziers.
def simplifyCurves(curveGroup, error, reparaError, maxIterations, group_mode):
    # Calculates the unit tangent at point v
    def unitTangent(v, data_pts):
        tang = NdVector((0, 0, 0, 0, 0))
        if v != 0:
            # If it's not the first point, we can calculate a left-side tangent
            tang += data_pts[v].co - data_pts[v - 1].co
        if v != len(data_pts) - 1:
            # If it's not the last point, we can calculate a right-side tangent
            tang += data_pts[v + 1].co - data_pts[v].co
        tang.normalize()
        return tang

    # assign a parametric u value to each point in the original data, via relative arc length
    # http://en.wikipedia.org/wiki/Arc_length
    def chordLength(data_pts, s, e):
        totalLength = 0
        for pt in data_pts[s:e + 1]:
            i = pt.index
            if i == s:
                chordLength = 0
            else:
                chordLength = (data_pts[i].co - data_pts[i - 1].co).length
            totalLength += chordLength
            pt.temp = totalLength
        for pt in data_pts[s:e + 1]:
            if totalLength == 0:
                print(s, e)
            pt.u = (pt.temp / totalLength)

    # binomial coefficient lookup table; it is only ever indexed with args
    # (3,0), (3,1), (3,2), (3,3), (2,0), (2,1), (2,2)!
    binomDict = {(3, 0): 1,
                 (3, 1): 3,
                 (3, 2): 3,
                 (3, 3): 1,
                 (2, 0): 1,
                 (2, 1): 2,
                 (2, 2): 1,
                 }

    # value at point t of a single Bernstein polynomial
    def bernsteinPoly(n, i, t):
        binomCoeff = binomDict[(n, i)]
        return binomCoeff * pow(t, i) * pow(1 - t, n - i)
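
    # For reference: B_{n,i}(t) = C(n, i) * t**i * (1 - t)**(n - i).
    # The four cubic basis functions (n=3, i=0..3) are non-negative and sum
    # to 1 for any t in [0, 1], which makes the bezier evaluation below a
    # convex combination of its four control points.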

    # fit a single cubic to the data points in range [s(tart), e(nd)].
    def fitSingleCubic(data_pts, s, e):

        # A - matrix used for calculating the C matrices for fitting
        def A(i, j, s, e, t1, t2):
            if j == 1:
                t = t1
            if j == 2:
                t = t2
            u = data_pts[i].u
            return t * bernsteinPoly(3, j, u)

        # X component, used for calculating the X matrices for fitting
        def xComponent(i, s, e):
            di = data_pts[i].co
            u = data_pts[i].u
            v0 = data_pts[s].co
            v3 = data_pts[e].co
            a = v0 * bernsteinPoly(3, 0, u)
            b = v0 * bernsteinPoly(3, 1, u)
            c = v3 * bernsteinPoly(3, 2, u)
            d = v3 * bernsteinPoly(3, 3, u)
            return (di - (a + b + c + d))

        t1 = unitTangent(s, data_pts)
        t2 = unitTangent(e, data_pts)
        c11 = sum([A(i, 1, s, e, t1, t2) * A(i, 1, s, e, t1, t2) for i in range(s, e + 1)])
        c12 = sum([A(i, 1, s, e, t1, t2) * A(i, 2, s, e, t1, t2) for i in range(s, e + 1)])
        c21 = c12
        c22 = sum([A(i, 2, s, e, t1, t2) * A(i, 2, s, e, t1, t2) for i in range(s, e + 1)])

        x1 = sum([xComponent(i, s, e) * A(i, 1, s, e, t1, t2) for i in range(s, e + 1)])
        x2 = sum([xComponent(i, s, e) * A(i, 2, s, e, t1, t2) for i in range(s, e + 1)])

        # calculate the determinants of the 3 matrices
        det_cc = c11 * c22 - c21 * c12
        det_cx = c11 * x2 - c12 * x1
        det_xc = x1 * c22 - x2 * c12
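
        # This is Cramer's rule applied to the 2x2 least-squares system
        # [c11 c12; c21 c22] * [alpha0; alpha1] = [x1; x2], so below
        # alpha0 = det_xc / det_cc and alpha1 = det_cx / det_cc.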

        # if the matrix is singular, fudge the data a bit
        if det_cc == 0:
            det_cc = 0.01

        # the alphas are the correct offsets for the bezier handles
        alpha0 = det_xc / det_cc  # offset from the first point (its right handle)
        alpha1 = det_cx / det_cc  # offset from the last point (its left handle)

        sRightHandle = data_pts[s].co.copy()
        sTangent = t1 * abs(alpha0)
        sRightHandle += sTangent  # position of first pt's handle
        eLeftHandle = data_pts[e].co.copy()
        eTangent = t2 * abs(alpha1)
        eLeftHandle += eTangent  # position of last pt's handle.

        # return a 4 member tuple representing the bezier
        return (data_pts[s].co,
                sRightHandle,
                eLeftHandle,
                data_pts[e].co)

    # convert 2 given data points into a cubic bezier.
    # handles are offset along the tangent at
    # a 3rd of the length between the points.
    def fitSingleCubic2Pts(data_pts, s, e):
        alpha0 = alpha1 = (data_pts[s].co - data_pts[e].co).length / 3

        sRightHandle = data_pts[s].co.copy()
        sTangent = unitTangent(s, data_pts) * abs(alpha0)
        sRightHandle += sTangent  # position of first pt's handle
        eLeftHandle = data_pts[e].co.copy()
        eTangent = unitTangent(e, data_pts) * abs(alpha1)
        eLeftHandle += eTangent  # position of last pt's handle.

        # return a 4 member tuple representing the bezier
        return (data_pts[s].co,
                sRightHandle,
                eLeftHandle,
                data_pts[e].co)

    # evaluate bezier, represented by a 4 member tuple (pts), at point t.
    def bezierEval(pts, t):
        sumVec = NdVector((0, 0, 0, 0, 0))
        for i in range(4):
            sumVec += pts[i] * bernsteinPoly(3, i, t)
        return sumVec

    # calculate the highest error between bezier and original data;
    # returns the distance and the index of the point where the max error occurs.
    def maxErrorAmount(data_pts, bez, s, e):
        maxError = 0
        maxErrorPt = s
        if e - s < 3:
            return 0, None
        for pt in data_pts[s:e + 1]:
            bezVal = bezierEval(bez, pt.u)
            normalize_error = pt.co.length
            if normalize_error == 0:
                normalize_error = 1
            tmpError = (pt.co - bezVal).length / normalize_error
            if tmpError >= maxError:
                maxError = tmpError
                maxErrorPt = pt.index
        return maxError, maxErrorPt

    # calculates the bezier derivative at point t,
    # that is, the tangent at point t.
    def getBezDerivative(bez, t):
        n = len(bez) - 1
        sumVec = NdVector((0, 0, 0, 0, 0))
        for i in range(n):
            sumVec += (bez[i + 1] - bez[i]) * (n * bernsteinPoly(n - 1, i, t))
        return sumVec

    # use Newton-Raphson to find a better parameterization of the data points,
    # one that minimizes the distance (or error)
    # between bezier and original data.
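    # The update below is a Newton root-finding step, u <- u - f(u) / f'(u),
    # applied to the squared distance f(u); it nudges each interior point's
    # parameter toward the spot where the bezier passes closest to it.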
    def newtonRaphson(data_pts, s, e, bez):
        for pt in data_pts[s:e + 1]:
            if pt.index == s:
                pt.u = 0
            elif pt.index == e:
                pt.u = 1
            else:
                u = pt.u
                qu = bezierEval(bez, pt.u)
                qud = getBezDerivative(bez, u)
                # we wish to minimize f(u),
                # the squared distance between curve and data
                fu = (qu - pt.co).length ** 2
                fud = (2 * (qu.x - pt.co.x) * (qud.x)) + (2 * (qu.y - pt.co.y) * (qud.y))
                if fud == 0:
                    fu = 0
                    fud = 1
                pt.u = pt.u - (fu / fud)

    # Create data_pts, a list of dataPoint objects, each assigned an index i and an NdVector
    def createDataPts(curveGroup, group_mode):
        make_editable_fcurves(curveGroup if group_mode else (curveGroup,))

        if group_mode:
            print([x.data_path for x in curveGroup])
            comp_cos = (0,) * (4 - len(curveGroup))  # We need to add that number of null cos to get our 5D vector.
            kframes = sorted(set(kf.co.x for fc in curveGroup for kf in fc.keyframe_points))
            data_pts = [dataPoint(i, NdVector((fra,) + tuple(fc.evaluate(fra) for fc in curveGroup) + comp_cos))
                        for i, fra in enumerate(kframes)]
        else:
            data_pts = [dataPoint(i, NdVector((kf.co.x, kf.co.y, 0, 0, 0)))
                        for i, kf in enumerate(curveGroup.keyframe_points)]
        return data_pts

    # Recursively fit cubic beziers to the data_pts between s and e
    def fitCubic(data_pts, s, e):
        # if there are fewer than 3 points, fit a single basic bezier
        if e - s < 3:
            bez = fitSingleCubic2Pts(data_pts, s, e)
        else:
            # if there are more, parameterize the points
            # and fit a single cubic bezier
            chordLength(data_pts, s, e)
            bez = fitSingleCubic(data_pts, s, e)

        # calculate max error and the point where it occurs
        maxError, maxErrorPt = maxErrorAmount(data_pts, bez, s, e)
        # if the error is small enough, reparameterization might be enough
        if maxError < reparaError and maxError > error:
            for i in range(maxIterations):
                newtonRaphson(data_pts, s, e, bez)
                if e - s < 3:
                    bez = fitSingleCubic2Pts(data_pts, s, e)
                else:
                    bez = fitSingleCubic(data_pts, s, e)

            # recalculate max error and the point where it occurs
            maxError, maxErrorPt = maxErrorAmount(data_pts, bez, s, e)

        # reparameterization wasn't enough; we need 2 beziers for this range.
        # Split the bezier at the point of maximum error
        if maxError > error:
            fitCubic(data_pts, s, maxErrorPt)
            fitCubic(data_pts, maxErrorPt, e)
        else:
            # error is small enough, keep the bezier.
            beziers.append(bez)
            return

    # deletes the sampled points and creates beziers.
    def createNewCurves(curveGroup, beziers, group_mode):
        # remove all existing keyframes except the first
        if group_mode:
            for fcurve in curveGroup:
                for i in range(len(fcurve.keyframe_points) - 1, 0, -1):
                    fcurve.keyframe_points.remove(fcurve.keyframe_points[i], fast=True)
        else:
            fcurve = curveGroup
            for i in range(len(fcurve.keyframe_points) - 1, 0, -1):
                fcurve.keyframe_points.remove(fcurve.keyframe_points[i], fast=True)

        # insert the calculated beziers into blender data.
        if group_mode:
            for fullbez in beziers:
                for i, fcurve in enumerate(curveGroup):
                    bez = [Vector((vec[0], vec[i + 1])) for vec in fullbez]
                    newKey = fcurve.keyframe_points.insert(frame=bez[0].x, value=bez[0].y, options={'FAST'})
                    newKey.handle_right = (bez[1].x, bez[1].y)

                    newKey = fcurve.keyframe_points.insert(frame=bez[3].x, value=bez[3].y, options={'FAST'})
                    newKey.handle_left = (bez[2].x, bez[2].y)
        else:
            for bez in beziers:
                bez = [vec.resize_2d() for vec in bez]
                newKey = fcurve.keyframe_points.insert(frame=bez[0].x, value=bez[0].y, options={'FAST'})
                newKey.handle_right = (bez[1].x, bez[1].y)

                newKey = fcurve.keyframe_points.insert(frame=bez[3].x, value=bez[3].y, options={'FAST'})
                newKey.handle_left = (bez[2].x, bez[2].y)

        # We used fast remove/insert, time to update the curves!
        for fcurve in (curveGroup if group_mode else (curveGroup,)):
            fcurve.update()

    # indices are detached from the data points' frame (x) values and
    # stored in the dataPoint objects; s and e represent a range of those indices
    data_pts = createDataPts(curveGroup, group_mode)

    if not data_pts:
        return

    s = 0  # start
    e = len(data_pts) - 1  # end

    beziers = []

    # begin the recursive fitting algorithm.
    fitCubic(data_pts, s, e)

    # remove the old fcurve keyframes and insert the new ones
    createNewCurves(curveGroup, beziers, group_mode)


# Main function of simplification, which is called by the Operator
# IN:
#   sel_opt - either "sel" (selected) or "all", for which curves to affect
#   error - maximum error allowed, as a fraction (default 0.002;
#           divide the desired percentage by 10000, e.g. 20 -> 0.002).
#   group_mode - boolean, whether to analyze each curve separately or in groups,
#                where a group is all curves that affect the same property/RNA path
def fcurves_simplify(context, obj, sel_opt="all", error=0.002, group_mode=True):
    # main vars
    fcurves = obj.animation_data.action.fcurves

    if sel_opt == "sel":
        sel_fcurves = [fcurve for fcurve in fcurves if fcurve.select]
    else:
        sel_fcurves = fcurves[:]

    # Error threshold for Newton-Raphson reparameterization
    reparaError = error * 32
    maxIterations = 16

    if group_mode:
        fcurveDict = {}
        # this loop sorts all the fcurves into groups of 3 or 4,
        # based on their RNA data path, which corresponds to
        # which property they affect
        for curve in sel_fcurves:
            if curve.data_path in fcurveDict:  # if this bone has been added, append the curve to its list
                fcurveDict[curve.data_path].append(curve)
            else:
                fcurveDict[curve.data_path] = [curve]  # new bone, add a new dict value with this first curve
        fcurveGroups = fcurveDict.values()
    else:
        fcurveGroups = sel_fcurves

    if error > 0.00000:
        # simplify every selected curve.
        totalt = 0
        for i, fcurveGroup in enumerate(fcurveGroups):
            print("Processing curve " + str(i + 1) + "/" + str(len(fcurveGroups)))
            t = time.perf_counter()
            simplifyCurves(fcurveGroup, error, reparaError, maxIterations, group_mode)
            t = time.perf_counter() - t
            print(str(t)[:5] + " seconds to process last curve")
            totalt += t
        print(str(totalt)[:5] + " seconds, total time elapsed")

    return
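

# Hedged example (not part of the original addon): simplify the active
# object's fcurves with the default tolerances. Assumes the active object
# has animation_data.action; call it manually, e.g. from Blender's console.
def example_simplify_active(error=0.002):
    context = bpy.context
    obj = context.active_object
    # group_mode=True keys all channels of a property on shared frames
    fcurves_simplify(context, obj, sel_opt="all", error=error, group_mode=True)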


def detect_min_max(v):
    """
    Converted from the MATLAB script at http://billauer.co.il/peakdet.html

    Yields indices of peaks, i.e. local minima/maxima.

    % Eli Billauer, 3.4.05 (Explicitly not copyrighted).
    % This function is released to the public domain; Any use is allowed.
    """
    min_val, max_val = float('inf'), -float('inf')

    check_max = True

    for i, val in enumerate(v):
        if val > max_val:
            max_val = val
        if val < min_val:
            min_val = val

        if check_max:
            if val < max_val:
                yield i
                min_val = val
                check_max = False
        else:
            if val > min_val:
                yield i
                max_val = val
                check_max = True


def denoise(obj, fcurves):
    """
    Implementation of a non-linear blur filter.
    Finds spikes in the fcurve, and replaces spikes that are too big with the average of the surrounding keyframes.
    """
    make_editable_fcurves(fcurves)

    for fcurve in fcurves:
        org_pts = fcurve.keyframe_points[:]
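
        # Note: detect_min_max yields the index one sample past each extremum,
        # and the [1:-1] slice below shifts indices down by one, so the two
        # offsets cancel and idx points at the extremum itself in org_pts.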
        for idx in detect_min_max(pt.co.y for pt in fcurve.keyframe_points[1:-1]):
            # Find the neighbours
            prev_pt = org_pts[idx - 1].co.y
            next_pt = org_pts[idx + 1].co.y
            this_pt = org_pts[idx]

            # Check the distance from the min/max to the average of the surrounding points.
            avg = (prev_pt + next_pt) / 2
            is_peak = abs(this_pt.co.y - avg) > avg * 0.02

            if is_peak:
                diff = avg - fcurve.keyframe_points[idx].co.y
                fcurve.keyframe_points[idx].co.y = avg
                fcurve.keyframe_points[idx].handle_left.y += diff
                fcurve.keyframe_points[idx].handle_right.y += diff

        # Important to update the curve after modifying it!
        fcurve.update()
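

# Hedged example (not part of the original addon): denoise the location
# fcurves of the active object. Assumes it has animation_data.action.
def example_denoise_active():
    obj = bpy.context.active_object
    loc_curves = [fc for fc in obj.animation_data.action.fcurves
                  if fc.data_path == "location"]
    denoise(obj, loc_curves)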


# Receives an armature, and rotates all bones by 90 degrees along the X axis.
# This fixes the common axis issue BVH files have when importing.
# IN: Armature (bpy.types.Armature)
def rotate_fix_armature(arm_data):
    global_matrix = Matrix.Rotation(radians(90), 4, "X")
    bpy.ops.object.mode_set(mode='EDIT', toggle=False)
    # disconnect all bones for ease of global rotation
    connectedBones = []
    for bone in arm_data.edit_bones:
        if bone.use_connect:
            connectedBones.append(bone.name)
            bone.use_connect = False

    # rotate all the bones around their center
    for bone in arm_data.edit_bones:
        bone.transform(global_matrix)

    # reconnect the bones
    for bone in connectedBones:
        arm_data.edit_bones[bone].use_connect = True
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)


# Roughly scales the performer armature to match the enduser armature.
# IN: performer_obj, enduser_obj - Blender objects whose .data is an armature.
def scale_fix_armature(performer_obj, enduser_obj):
    perf_bones = performer_obj.data.bones
    end_bones = enduser_obj.data.bones

    def calculateBoundingRadius(bones):
        # Calculate the average position of the bones
        center = sum((bone.head_local for bone in bones), Vector())
        center /= len(bones)

        # The radius is defined as the max distance from the center.
        radius = max((bone.head_local - center).length for bone in bones)
        return radius

    perf_rad = calculateBoundingRadius(performer_obj.data.bones)
    end_rad = calculateBoundingRadius(enduser_obj.data.bones)

    factor = end_rad / perf_rad
    performer_obj.scale = factor * Vector((1, 1, 1))


# Guess Mapping
# Given a performer and enduser armature, attempts to guess the hierarchy mapping
def guessMapping(performer_obj, enduser_obj):
    perf_bones = performer_obj.data.bones
    end_bones = enduser_obj.data.bones

    root = perf_bones[0]

    def findBoneSide(bone):
        if "Left" in bone:
            return "Left", bone.replace("Left", "").lower().replace(".", "")
        if "Right" in bone:
            return "Right", bone.replace("Right", "").lower().replace(".", "")
        if "L" in bone:
            return "Left", bone.replace("L", "").lower().replace(".", "")
        if "R" in bone:
            return "Right", bone.replace("R", "").lower().replace(".", "")
        return "", bone

    def nameMatch(bone_a, bone_b):
        # nameMatch - receives two strings; returns 2 if they are relatively the same,
        # 1 if they are the same apart from being R and L, and 0 if there is no match at all
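        # e.g. nameMatch("LeftArm", "arm.L") -> 2 (same side, same base name),
        #      nameMatch("LeftArm", "RightArm") -> 1 (opposite sides, same base name).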
        side_a, noside_a = findBoneSide(bone_a)
        side_b, noside_b = findBoneSide(bone_b)
        if side_a == side_b:
            if noside_a in noside_b or noside_b in noside_a:
                return 2
        else:
            if noside_a in noside_b or noside_b in noside_a:
                return 1
        return 0

    def guessSingleMapping(perf_bone):
        possible_bones = [end_bones[0]]

        while possible_bones:
            for end_bone in possible_bones:
                match = nameMatch(perf_bone.name, end_bone.name)
                if match == 2 and not perf_bone.map:
                    perf_bone.map = end_bone.name
                #~ elif match == 1 and not perf_bone.map:
                #~     oppo = perf_bones[oppositeBone(perf_bone)].map
                #~     if oppo:
                #~         perf_bone = oppo
            newPossibleBones = []
            for end_bone in possible_bones:
                newPossibleBones += list(end_bone.children)
            possible_bones = newPossibleBones

        for child in perf_bone.children:
            guessSingleMapping(child)

    guessSingleMapping(root)


# Creates limit rotation constraints on the enduser armature based on the range of motion (max/min of fcurves) of the performer.
# IN: context (bpy.context, etc.), and 2 blender objects which are armatures
# OUT: creates the limit constraints.
def limit_dof(context, performer_obj, enduser_obj):
    limitDict = {}
    perf_bones = [bone for bone in performer_obj.pose.bones if bone.bone.map]
    c_frame = context.scene.frame_current
    for bone in perf_bones:
        limitDict[bone.bone.map] = [1000, 1000, 1000, -1000, -1000, -1000]
    for t in range(context.scene.frame_start, context.scene.frame_end):
        context.scene.frame_set(t)
        for bone in perf_bones:
            end_bone = enduser_obj.pose.bones[bone.bone.map]
            bake_matrix = bone.matrix
            rest_matrix = end_bone.bone.matrix_local

            if end_bone.parent and end_bone.bone.use_inherit_rotation:
                srcParent = bone.parent
                parent_mat = srcParent.matrix
                parent_rest = end_bone.parent.bone.matrix_local
                parent_rest_inv = parent_rest.inverted()
                parent_mat_inv = parent_mat.inverted()
                bake_matrix = parent_mat_inv @ bake_matrix
                rest_matrix = parent_rest_inv @ rest_matrix

            rest_matrix_inv = rest_matrix.inverted()
            bake_matrix = rest_matrix_inv @ bake_matrix

            mat = bake_matrix
            euler = mat.to_euler()
            limitDict[bone.bone.map][0] = min(limitDict[bone.bone.map][0], euler.x)
            limitDict[bone.bone.map][1] = min(limitDict[bone.bone.map][1], euler.y)
            limitDict[bone.bone.map][2] = min(limitDict[bone.bone.map][2], euler.z)
            limitDict[bone.bone.map][3] = max(limitDict[bone.bone.map][3], euler.x)
            limitDict[bone.bone.map][4] = max(limitDict[bone.bone.map][4], euler.y)
            limitDict[bone.bone.map][5] = max(limitDict[bone.bone.map][5], euler.z)
    for bone in enduser_obj.pose.bones:
        existingConstraint = [constraint for constraint in bone.constraints if constraint.name == "DOF Limitation"]
        if existingConstraint:
            bone.constraints.remove(existingConstraint[0])
    end_bones = [bone for bone in enduser_obj.pose.bones if bone.name in limitDict.keys()]
    for bone in end_bones:
        #~ if not bone.is_in_ik_chain:
        newCons = bone.constraints.new("LIMIT_ROTATION")
        newCons.name = "DOF Limitation"
        newCons.owner_space = "LOCAL"
        newCons.min_x, newCons.min_y, newCons.min_z, newCons.max_x, newCons.max_y, newCons.max_z = limitDict[bone.name]
        newCons.use_limit_x = True
        newCons.use_limit_y = True
        newCons.use_limit_z = True
    context.scene.frame_set(c_frame)


# Removes the constraints that were added by limit_dof on the enduser_obj
def limit_dof_toggle_off(context, enduser_obj):
    for bone in enduser_obj.pose.bones:
        existingConstraint = [constraint for constraint in bone.constraints if constraint.name == "DOF Limitation"]
        if existingConstraint:
            bone.constraints.remove(existingConstraint[0])


# Reparameterizes a blender path by keyframing its eval_time to match a stride_object's forward velocity.
# IN: context, stride object (blender object with location keyframes), path object.
def path_editing(context, stride_obj, path):
    y_fcurve = [fcurve for fcurve in stride_obj.animation_data.action.fcurves if fcurve.data_path == "location"][1]
    s, e = context.scene.frame_start, context.scene.frame_end  # y_fcurve.range()
    s = int(s)
    e = int(e)
    y_s = y_fcurve.evaluate(s)
    y_e = y_fcurve.evaluate(e)
    direction = (y_e - y_s) / abs(y_e - y_s)
    existing_cons = [constraint for constraint in stride_obj.constraints if constraint.type == "FOLLOW_PATH"]
    for cons in existing_cons:
        stride_obj.constraints.remove(cons)
    path_cons = stride_obj.constraints.new("FOLLOW_PATH")
    if direction < 0:
        path_cons.forward_axis = "TRACK_NEGATIVE_Y"
    else:
        path_cons.forward_axis = "FORWARD_Y"
    path_cons.target = path
    path_cons.use_curve_follow = True
    path.data.path_duration = e - s
    try:
        path.data.animation_data.action.fcurves
    except AttributeError:
        path.data.keyframe_insert("eval_time", frame=0)
    eval_time_fcurve = [fcurve for fcurve in path.data.animation_data.action.fcurves if fcurve.data_path == "eval_time"]
    eval_time_fcurve = eval_time_fcurve[0]
    totalLength = 0
    parameterization = {}
    print("evaluating curve")
    for t in range(s, e - 1):
        if s == t:
            chordLength = 0
        else:
            chordLength = (y_fcurve.evaluate(t) - y_fcurve.evaluate(t + 1))
        totalLength += chordLength
        parameterization[t] = totalLength
    for t in range(s + 1, e - 1):
        if totalLength == 0:
            print("no forward motion")
        parameterization[t] /= totalLength
        parameterization[t] *= e - s
    parameterization[e] = e - s
    for t in parameterization.keys():
        eval_time_fcurve.keyframe_points.insert(frame=t, value=parameterization[t])
    y_fcurve.mute = True
    print("finished path editing")


# Animation Stitching
# Stitches two retargeted animations together via NLA settings.
# IN: enduser_obj, a blender armature that has had two retargets applied.
def anim_stitch(context, enduser_obj):
    stitch_settings = enduser_obj.data.stitch_settings
    action_1 = stitch_settings.first_action
    action_2 = stitch_settings.second_action
    if stitch_settings.stick_bone != "":
        selected_bone = enduser_obj.pose.bones[stitch_settings.stick_bone]
    else:
        selected_bone = enduser_obj.pose.bones[0]
    scene = context.scene
    TrackNamesA = enduser_obj.data.mocapNLATracks[action_1]
    TrackNamesB = enduser_obj.data.mocapNLATracks[action_2]
    enduser_obj.data.active_mocap = action_1
    anim_data = enduser_obj.animation_data
    # add tracks for action 2
    mocapAction = bpy.data.actions[TrackNamesB.base_track]
    mocapTrack = anim_data.nla_tracks.new()
    mocapTrack.name = TrackNamesB.base_track
    mocapStrip = mocapTrack.strips.new(TrackNamesB.base_track, stitch_settings.blend_frame, mocapAction)
    mocapStrip.extrapolation = "HOLD_FORWARD"
    mocapStrip.blend_in = stitch_settings.blend_amount
    mocapStrip.action_frame_start += stitch_settings.second_offset
    mocapStrip.action_frame_end += stitch_settings.second_offset
    constraintTrack = anim_data.nla_tracks.new()
    constraintTrack.name = TrackNamesB.auto_fix_track
    constraintAction = bpy.data.actions[TrackNamesB.auto_fix_track]
    constraintStrip = constraintTrack.strips.new(TrackNamesB.auto_fix_track, stitch_settings.blend_frame, constraintAction)
    constraintStrip.extrapolation = "HOLD_FORWARD"
    constraintStrip.blend_in = stitch_settings.blend_amount
    userTrack = anim_data.nla_tracks.new()
    userTrack.name = TrackNamesB.manual_fix_track
    userAction = bpy.data.actions[TrackNamesB.manual_fix_track]
    userStrip = userTrack.strips.new(TrackNamesB.manual_fix_track, stitch_settings.blend_frame, userAction)
    userStrip.extrapolation = "HOLD_FORWARD"
    userStrip.blend_in = stitch_settings.blend_amount
    # stride bone
    if enduser_obj.parent:
        if enduser_obj.parent.name == "stride_bone":
            stride_bone = enduser_obj.parent
            stride_anim_data = stride_bone.animation_data
            stride_anim_data.use_nla = True
            stride_anim_data.action = None
            for track in stride_anim_data.nla_tracks:
                stride_anim_data.nla_tracks.remove(track)
            actionATrack = stride_anim_data.nla_tracks.new()
            actionATrack.name = TrackNamesA.stride_action
            actionAStrip = actionATrack.strips.new(TrackNamesA.stride_action, 0, bpy.data.actions[TrackNamesA.stride_action])
            actionAStrip.extrapolation = "NOTHING"
            actionBTrack = stride_anim_data.nla_tracks.new()
            actionBTrack.name = TrackNamesB.stride_action
            actionBStrip = actionBTrack.strips.new(TrackNamesB.stride_action, stitch_settings.blend_frame, bpy.data.actions[TrackNamesB.stride_action])
            actionBStrip.action_frame_start += stitch_settings.second_offset
            actionBStrip.action_frame_end += stitch_settings.second_offset
            actionBStrip.extrapolation = "NOTHING"
            # we need to change the stride_bone's action to add the offset
            aStrideCurves = [fcurve for fcurve in bpy.data.actions[TrackNamesA.stride_action].fcurves if fcurve.data_path == "location"]
            bStrideCurves = [fcurve for fcurve in bpy.data.actions[TrackNamesB.stride_action].fcurves if fcurve.data_path == "location"]
            scene.frame_set(stitch_settings.blend_frame - 1)
            desired_pos = (enduser_obj.matrix_world @ selected_bone.matrix.to_translation())
            scene.frame_set(stitch_settings.blend_frame)
            actual_pos = (enduser_obj.matrix_world @ selected_bone.matrix.to_translation())
            print(desired_pos, actual_pos)
            offset = Vector(actual_pos) - Vector(desired_pos)

            for i, fcurve in enumerate(bStrideCurves):
                print(offset[i], i, fcurve.array_index)
                for pt in fcurve.keyframe_points:
                    pt.co.y -= offset[i]
                    pt.handle_left.y -= offset[i]
                    pt.handle_right.y -= offset[i]

            #~ actionBStrip.blend_in = stitch_settings.blend_amount


# Guesses settings for animation stitching via cross correlation
def guess_anim_stitch(context, enduser_obj):
    stitch_settings = enduser_obj.data.stitch_settings
    action_1 = stitch_settings.first_action
    action_2 = stitch_settings.second_action
    TrackNamesA = enduser_obj.data.mocapNLATracks[action_1]
    TrackNamesB = enduser_obj.data.mocapNLATracks[action_2]
    mocapA = bpy.data.actions[TrackNamesA.base_track]
    mocapB = bpy.data.actions[TrackNamesB.base_track]
    curvesA = mocapA.fcurves
    curvesB = mocapB.fcurves
    flm, s, data = crossCorrelationMatch(curvesA, curvesB, 10)
    print("Guessed the following for the blend frame and offset: ", flm, s)
    enduser_obj.data.stitch_settings.blend_frame = flm
    enduser_obj.data.stitch_settings.second_offset = s