1 # ##### BEGIN GPL LICENSE BLOCK #####
3 # This program is free software; you can redistribute it and/or
4 # modify it under the terms of the GNU General Public License
5 # as published by the Free Software Foundation; either version 2
6 # of the License, or (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software Foundation,
15 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 # ##### END GPL LICENSE BLOCK #####
import time

from math import sqrt, radians, floor, ceil

import bpy
from mathutils import Vector, Matrix
25 # A Python implementation of n sized Vectors.
26 # Mathutils has a max size of 4, and we need at least 5 for Simplify Curves and even more for Cross Correlation.
27 # Vector utility functions
31 def __init__(self
, vec
):
37 def __mul__(self
, otherMember
):
38 # assume anything with list access is a vector
39 if isinstance(otherMember
, NdVector
):
43 return sum([a
[i
] * b
[i
] for i
in range(n
)])
46 return NdVector([otherMember
* x
for x
in self
.vec
])
48 def __sub__(self
, otherVec
):
52 return NdVector([a
[i
] - b
[i
] for i
in range(n
)])
54 def __add__(self
, otherVec
):
58 return NdVector([a
[i
] + b
[i
] for i
in range(n
)])
60 def __div__(self
, scalar
):
61 return NdVector([x
/ scalar
for x
in self
.vec
])
65 return sqrt(self
* self
)
74 self
.vec
= [x
/ len for x
in self
.vec
]
77 return NdVector(self
.vec
)
79 def __getitem__(self
, i
):
91 return Vector((self
.x
, self
.y
))
94 #Sampled Data Point class for Simplify Curves
97 # x,y1,y2,y3 coordinate of original point
98 co
= NdVector((0, 0, 0, 0, 0))
99 #position according to parametric view of original data, [0,1] range
101 #use this for anything
104 def __init__(self
, index
, co
, u
=0):
110 # Helper to convert from a sampled fcurve back to editable keyframes one.
111 def make_editable_fcurves(fcurves
):
113 if fc
.sampled_points
:
114 fc
.convert_to_keyframes(floor(fc
.sampled_points
[0].co
[0]), ceil(fc
.sampled_points
[-1].co
[0]) + 1)
117 #Cross Correlation Function
118 #http://en.wikipedia.org/wiki/Cross_correlation
119 #IN: curvesA, curvesB - bpy_collection/list of fcurves to analyze. Auto-Correlation is when they are the same.
120 # margin - When searching for the best "start" frame, how large a neighborhood of frames should we inspect (similar to epsilon in Calculus)
121 #OUT: startFrame, length of new anim, and curvesA
122 def crossCorrelationMatch(curvesA
, curvesB
, margin
):
125 start
= int(max(curvesA
[0].range()[0], curvesB
[0].range()[0]))
126 end
= int(min(curvesA
[0].range()[1], curvesB
[0].range()[1]))
128 #transfer all fcurves data on each frame to a single NdVector.
129 for i
in range(1, end
):
131 for fcurve
in curvesA
:
132 if fcurve
.data_path
in [otherFcurve
.data_path
for otherFcurve
in curvesB
]:
133 vec
.append(fcurve
.evaluate(i
))
134 dataA
.append(NdVector(vec
))
136 for fcurve
in curvesB
:
137 if fcurve
.data_path
in [otherFcurve
.data_path
for otherFcurve
in curvesA
]:
138 vec
.append(fcurve
.evaluate(i
))
139 dataB
.append(NdVector(vec
))
141 #Comparator for Cross Correlation. "Classic" implementation uses dot product, as do we.
145 #Create Rxy, which holds the Cross Correlation data.
149 for j
in range(i
, min(i
+ N
, N
)):
150 Rxy
[i
] += comp(dataA
[j
], dataB
[j
- i
])
152 Rxy
[i
] += comp(dataA
[j
], dataB
[j
- i
+ N
])
155 #Find the Local maximums in the Cross Correlation data via numerical derivative.
156 def LocalMaximums(Rxy
):
157 Rxyd
= [Rxy
[i
] - Rxy
[i
- 1] for i
in range(1, len(Rxy
))]
159 for i
in range(1, len(Rxyd
) - 1):
162 #sign change (zerocrossing) at point i, denoting max point (only)
163 if (a
>= 0 and b
< 0) or (a
< 0 and b
>= 0):
164 maxs
.append((i
, max(Rxy
[i
], Rxy
[i
- 1])))
165 return [x
[0] for x
in maxs
]
166 #~ return max(maxs, key=lambda x: x[1])[0]
168 #flms - the possible offsets of the first part of the animation. In Auto-Corr, this is the length of the loop.
169 flms
= LocalMaximums(Rxy
[0:int(len(Rxy
))])
172 #for every local maximum, find the best one - i.e. also has the best start frame.
176 for i
in range(len(dataA
) - flm
):
177 diff
.append((dataA
[i
] - dataB
[i
+ flm
]).lengthSq
)
179 def lowerErrorSlice(diff
, e
):
180 #index, error at index
181 bestSlice
= (0, 100000)
182 for i
in range(e
, len(diff
) - e
):
183 errorSlice
= sum(diff
[i
- e
:i
+ e
+ 1])
184 if errorSlice
< bestSlice
[1]:
185 bestSlice
= (i
, errorSlice
, flm
)
188 s
= lowerErrorSlice(diff
, margin
)
191 #Find the best result and return it.
192 ss
.sort(key
=lambda x
: x
[1])
193 return ss
[0][2], ss
[0][0], dataA
196 #Uses auto correlation (cross correlation of the same set of curves) and trims the active_object's fcurves
197 #Except for location curves (which in mocap tend to be not cyclic, e.g. a walk cycle forward)
198 #Transfers the fcurve data to a list of NdVector (length of list is number of fcurves), and calls the cross correlation function.
199 #Then trims the fcurve accordingly.
200 #IN: Nothing, set the object you want as active and call. Assumes object has animation_data.action!
201 #OUT: Trims the object's fcurves (except location curves).
203 context
= bpy
.context
204 obj
= context
.active_object
207 x
.data_path
== "location"
209 fcurves
= [x
for x
in obj
.animation_data
.action
.fcurves
if not locCurve(x
)]
213 flm
, s
, data
= crossCorrelationMatch(fcurves
, fcurves
, margin
)
214 loop
= data
[s
:s
+ flm
]
216 #performs blending with a root falloff on the seam's neighborhood to ensure good tiling.
217 for i
in range(1, margin
+ 1):
218 w1
= sqrt(float(i
) / margin
)
219 loop
[-i
] = (loop
[-i
] * w1
) + (loop
[0] * (1 - w1
))
221 for curve
in fcurves
:
222 pts
= curve
.keyframe_points
223 for i
in range(len(pts
) - 1, -1, -1):
226 for c
, curve
in enumerate(fcurves
):
227 pts
= curve
.keyframe_points
228 for i
in range(len(loop
)):
229 pts
.insert(i
+ 2, loop
[i
][c
])
231 context
.scene
.frame_end
= flm
234 #simplifyCurves: performs the bulk of the samples to bezier conversion.
235 #IN: curveGroup - which can be a collection of singleFcurves, or grouped (via nested lists) .
236 # error - threshold of permissible error (max distance) of the new beziers to the original data
237 # reparaError - threshold of error where we should try to fix the parameterization rather than split the existing curve. > error, usually by a small constant factor for best performance.
238 # maxIterations - maximum number of iterations of reparameterizations we should attempt. (Newton-Raphson is not guaranteed to converge, so this is needed).
239 # group_mode - boolean, indicating whether we should place bezier keyframes on the same x (frame), or optimize each individual curve.
240 #OUT: None. Deletes the existing curves and creates the new beziers.
241 def simplifyCurves(curveGroup
, error
, reparaError
, maxIterations
, group_mode
):
242 #Calculates the unit tangent of point v
243 def unitTangent(v
, data_pts
):
244 tang
= NdVector((0, 0, 0, 0, 0))
246 #If it's not the first point, we can calculate a leftside tangent
247 tang
+= data_pts
[v
].co
- data_pts
[v
- 1].co
248 if v
!= len(data_pts
) - 1:
249 #If it's not the last point, we can calculate a rightside tangent
250 tang
+= data_pts
[v
+ 1].co
- data_pts
[v
].co
254 #assign parametric u value for each point in original data, via relative arc length
255 #http://en.wikipedia.org/wiki/Arc_length
256 def chordLength(data_pts
, s
, e
):
258 for pt
in data_pts
[s
:e
+ 1]:
263 chordLength
= (data_pts
[i
].co
- data_pts
[i
- 1].co
).length
264 totalLength
+= chordLength
265 pt
.temp
= totalLength
266 for pt
in data_pts
[s
:e
+ 1]:
269 pt
.u
= (pt
.temp
/ totalLength
)
271 # get binomial coefficient lookup table, this function/table is only called with args
272 # (3,0),(3,1),(3,2),(3,3),(2,0),(2,1),(2,2)!
273 binomDict
= {(3, 0): 1,
282 #value at pt t of a single bernstein Polynomial
283 def bernsteinPoly(n
, i
, t
):
284 binomCoeff
= binomDict
[(n
, i
)]
285 return binomCoeff
* pow(t
, i
) * pow(1 - t
, n
- i
)
287 # fit a single cubic to data points in range [s(tart),e(nd)].
288 def fitSingleCubic(data_pts
, s
, e
):
290 # A - matrix used for calculating C matrices for fitting
291 def A(i
, j
, s
, e
, t1
, t2
):
297 return t
* bernsteinPoly(3, j
, u
)
299 # X component, used for calculating X matrices for fitting
300 def xComponent(i
, s
, e
):
305 a
= v0
* bernsteinPoly(3, 0, u
)
306 b
= v0
* bernsteinPoly(3, 1, u
)
307 c
= v3
* bernsteinPoly(3, 2, u
)
308 d
= v3
* bernsteinPoly(3, 3, u
)
309 return (di
- (a
+ b
+ c
+ d
))
311 t1
= unitTangent(s
, data_pts
)
312 t2
= unitTangent(e
, data_pts
)
313 c11
= sum([A(i
, 1, s
, e
, t1
, t2
) * A(i
, 1, s
, e
, t1
, t2
) for i
in range(s
, e
+ 1)])
314 c12
= sum([A(i
, 1, s
, e
, t1
, t2
) * A(i
, 2, s
, e
, t1
, t2
) for i
in range(s
, e
+ 1)])
316 c22
= sum([A(i
, 2, s
, e
, t1
, t2
) * A(i
, 2, s
, e
, t1
, t2
) for i
in range(s
, e
+ 1)])
318 x1
= sum([xComponent(i
, s
, e
) * A(i
, 1, s
, e
, t1
, t2
) for i
in range(s
, e
+ 1)])
319 x2
= sum([xComponent(i
, s
, e
) * A(i
, 2, s
, e
, t1
, t2
) for i
in range(s
, e
+ 1)])
321 # calculate Determinate of the 3 matrices
322 det_cc
= c11
* c22
- c21
* c12
323 det_cx
= c11
* x2
- c12
* x1
324 det_xc
= x1
* c22
- x2
* c12
326 # if matrix is not homogenous, fudge the data a bit
330 # alpha's are the correct offset for bezier handles
331 alpha0
= det_xc
/ det_cc
# offset from right (first) point
332 alpha1
= det_cx
/ det_cc
# offset from left (last) point
334 sRightHandle
= data_pts
[s
].co
.copy()
335 sTangent
= t1
* abs(alpha0
)
336 sRightHandle
+= sTangent
# position of first pt's handle
337 eLeftHandle
= data_pts
[e
].co
.copy()
338 eTangent
= t2
* abs(alpha1
)
339 eLeftHandle
+= eTangent
# position of last pt's handle.
341 # return a 4 member tuple representing the bezier
342 return (data_pts
[s
].co
,
347 # convert 2 given data points into a cubic bezier.
348 # handles are offset along the tangent at
349 # a 3rd of the length between the points.
350 def fitSingleCubic2Pts(data_pts
, s
, e
):
351 alpha0
= alpha1
= (data_pts
[s
].co
- data_pts
[e
].co
).length
/ 3
353 sRightHandle
= data_pts
[s
].co
.copy()
354 sTangent
= unitTangent(s
, data_pts
) * abs(alpha0
)
355 sRightHandle
+= sTangent
# position of first pt's handle
356 eLeftHandle
= data_pts
[e
].co
.copy()
357 eTangent
= unitTangent(e
, data_pts
) * abs(alpha1
)
358 eLeftHandle
+= eTangent
# position of last pt's handle.
360 #return a 4 member tuple representing the bezier
361 return (data_pts
[s
].co
,
366 #evaluate bezier, represented by a 4 member tuple (pts) at point t.
367 def bezierEval(pts
, t
):
368 sumVec
= NdVector((0, 0, 0, 0, 0))
370 sumVec
+= pts
[i
] * bernsteinPoly(3, i
, t
)
373 #calculate the highest error between bezier and original data
374 #returns the distance and the index of the point where max error occurs.
375 def maxErrorAmount(data_pts
, bez
, s
, e
):
380 for pt
in data_pts
[s
:e
+ 1]:
381 bezVal
= bezierEval(bez
, pt
.u
)
382 normalize_error
= pt
.co
.length
383 if normalize_error
== 0:
385 tmpError
= (pt
.co
- bezVal
).length
/ normalize_error
386 if tmpError
>= maxError
:
388 maxErrorPt
= pt
.index
389 return maxError
, maxErrorPt
391 #calculated bezier derivative at point t.
392 #That is, tangent of point t.
393 def getBezDerivative(bez
, t
):
395 sumVec
= NdVector((0, 0, 0, 0, 0))
396 for i
in range(n
- 1):
397 sumVec
+= (bez
[i
+ 1] - bez
[i
]) * bernsteinPoly(n
- 1, i
, t
)
400 #use Newton-Raphson to find a better parameterization of datapoints,
401 #one that minimizes the distance (or error)
402 # between bezier and original data.
403 def newtonRaphson(data_pts
, s
, e
, bez
):
404 for pt
in data_pts
[s
:e
+ 1]:
411 qu
= bezierEval(bez
, pt
.u
)
412 qud
= getBezDerivative(bez
, u
)
413 #we wish to minimize f(u),
414 #the squared distance between curve and data
415 fu
= (qu
- pt
.co
).length
** 2
416 fud
= (2 * (qu
.x
- pt
.co
.x
) * (qud
.x
)) - (2 * (qu
.y
- pt
.co
.y
) * (qud
.y
))
420 pt
.u
= pt
.u
- (fu
/ fud
)
422 #Create data_pts, a list of dataPoint type, each is assigned index i, and an NdVector
423 def createDataPts(curveGroup
, group_mode
):
424 make_editable_fcurves(curveGroup
if group_mode
else (curveGroup
,))
427 print([x
.data_path
for x
in curveGroup
])
428 comp_cos
= (0,) * (4 - len(curveGroup
)) # We need to add that number of null cos to get our 5D vector.
429 kframes
= sorted(set(kf
.co
.x
for fc
in curveGroup
for kf
in fc
.keyframe_points
))
430 data_pts
= [dataPoint(i
, NdVector((fra
,) + tuple(fc
.evaluate(fra
) for fc
in curveGroup
) + comp_cos
))
431 for i
, fra
in enumerate(kframes
)]
433 data_pts
= [dataPoint(i
, NdVector((kf
.co
.x
, kf
.co
.y
, 0, 0, 0)))
434 for i
, kf
in enumerate(curveGroup
.keyframe_points
)]
437 #Recursively fit cubic beziers to the data_pts between s and e
438 def fitCubic(data_pts
, s
, e
):
439 # if there are less than 3 points, fit a single basic bezier
441 bez
= fitSingleCubic2Pts(data_pts
, s
, e
)
443 #if there are more, parameterize the points
444 # and fit a single cubic bezier
445 chordLength(data_pts
, s
, e
)
446 bez
= fitSingleCubic(data_pts
, s
, e
)
448 #calculate max error and point where it occurs
449 maxError
, maxErrorPt
= maxErrorAmount(data_pts
, bez
, s
, e
)
450 #if error is small enough, reparameterization might be enough
451 if maxError
< reparaError
and maxError
> error
:
452 for i
in range(maxIterations
):
453 newtonRaphson(data_pts
, s
, e
, bez
)
455 bez
= fitSingleCubic2Pts(data_pts
, s
, e
)
457 bez
= fitSingleCubic(data_pts
, s
, e
)
459 #recalculate max error and point where it occurs
460 maxError
, maxErrorPt
= maxErrorAmount(data_pts
, bez
, s
, e
)
462 #repara wasn't enough, we need 2 beziers for this range.
463 #Split the bezier at point of maximum error
465 fitCubic(data_pts
, s
, maxErrorPt
)
466 fitCubic(data_pts
, maxErrorPt
, e
)
468 #error is small enough, return the beziers.
472 # deletes the sampled points and creates beziers.
473 def createNewCurves(curveGroup
, beziers
, group_mode
):
474 #remove all existing data points
476 for fcurve
in curveGroup
:
477 for i
in range(len(fcurve
.keyframe_points
) - 1, 0, -1):
478 fcurve
.keyframe_points
.remove(fcurve
.keyframe_points
[i
], fast
=True)
481 for i
in range(len(fcurve
.keyframe_points
) - 1, 0, -1):
482 fcurve
.keyframe_points
.remove(fcurve
.keyframe_points
[i
], fast
=True)
484 #insert the calculated beziers to blender data.
486 for fullbez
in beziers
:
487 for i
, fcurve
in enumerate(curveGroup
):
488 bez
= [Vector((vec
[0], vec
[i
+ 1])) for vec
in fullbez
]
489 newKey
= fcurve
.keyframe_points
.insert(frame
=bez
[0].x
, value
=bez
[0].y
, options
={'FAST'})
490 newKey
.handle_right
= (bez
[1].x
, bez
[1].y
)
492 newKey
= fcurve
.keyframe_points
.insert(frame
=bez
[3].x
, value
=bez
[3].y
, options
={'FAST'})
493 newKey
.handle_left
= (bez
[2].x
, bez
[2].y
)
498 newKey
= fcurve
.keyframe_points
.insert(frame
=bez
[0].x
, value
=bez
[0].y
, options
={'FAST'})
499 newKey
.handle_right
= (bez
[1].x
, bez
[1].y
)
501 newKey
= fcurve
.keyframe_points
.insert(frame
=bez
[3].x
, value
=bez
[3].y
, options
={'FAST'})
502 newKey
.handle_left
= (bez
[2].x
, bez
[2].y
)
504 # We used fast remove/insert, time to update the curves!
505 for fcurve
in (curveGroup
if group_mode
else (curveGroup
,)):
508 # indices are detached from data point's frame (x) value and
509 # stored in the dataPoint object, represent a range
511 data_pts
= createDataPts(curveGroup
, group_mode
)
517 e
= len(data_pts
) - 1 # end
521 #begin the recursive fitting algorithm.
522 fitCubic(data_pts
, s
, e
)
524 #remove old Fcurves and insert the new ones
525 createNewCurves(curveGroup
, beziers
, group_mode
)
528 #Main function of simplification, which called by Operator
530 # sel_opt- either "sel" (selected) or "all" for which curves to effect
531 # error- maximum error allowed, in fraction (20% = 0.0020, which is the default),
532 # i.e. divide by 10000 from percentage wanted.
533 # group_mode- boolean, to analyze each curve separately or in groups,
534 # where a group is all curves that effect the same property/RNA path
535 def fcurves_simplify(context
, obj
, sel_opt
="all", error
=0.002, group_mode
=True):
537 fcurves
= obj
.animation_data
.action
.fcurves
540 sel_fcurves
= [fcurve
for fcurve
in fcurves
if fcurve
.select
]
542 sel_fcurves
= fcurves
[:]
544 #Error threshold for Newton Raphson reparamatizing
545 reparaError
= error
* 32
550 #this loop sorts all the fcurves into groups of 3 or 4,
551 #based on their RNA Data path, which corresponds to
552 #which property they effect
553 for curve
in sel_fcurves
:
554 if curve
.data_path
in fcurveDict
: # if this bone has been added, append the curve to its list
555 fcurveDict
[curve
.data_path
].append(curve
)
557 fcurveDict
[curve
.data_path
] = [curve
] # new bone, add a new dict value with this first curve
558 fcurveGroups
= fcurveDict
.values()
560 fcurveGroups
= sel_fcurves
563 #simplify every selected curve.
565 for i
, fcurveGroup
in enumerate(fcurveGroups
):
566 print("Processing curve " + str(i
+ 1) + "/" + str(len(fcurveGroups
)))
568 simplifyCurves(fcurveGroup
, error
, reparaError
, maxIterations
, group_mode
)
570 print(str(t
)[:5] + " seconds to process last curve")
572 print(str(totalt
)[:5] + " seconds, total time elapsed")
577 def detect_min_max(v
):
579 Converted from MATLAB script at http://billauer.co.il/peakdet.html
581 Yields indices of peaks, i.e. local minima/maxima.
583 % Eli Billauer, 3.4.05 (Explicitly not copyrighted).
584 % This function is released to the public domain; Any use is allowed.
587 min_val
, max_val
= float('inf'), -float('inf')
591 for i
, val
in enumerate(v
):
609 def denoise(obj
, fcurves
):
611 Implementation of non-linear blur filter.
612 Finds spikes in the fcurve, and replaces spikes that are too big with the average of the surrounding keyframes.
614 make_editable_fcurves(fcurves
)
616 for fcurve
in fcurves
:
617 org_pts
= fcurve
.keyframe_points
[:]
619 for idx
in detect_min_max(pt
.co
.y
for pt
in fcurve
.keyframe_points
[1:-1]):
620 # Find the neighbours
621 prev_pt
= org_pts
[idx
- 1].co
.y
622 next_pt
= org_pts
[idx
+ 1].co
.y
623 this_pt
= org_pts
[idx
]
625 # Check the distance from the min/max to the average of the surrounding points.
626 avg
= (prev_pt
+ next_pt
) / 2
627 is_peak
= abs(this_pt
.co
.y
- avg
) > avg
* 0.02
630 diff
= avg
- fcurve
.keyframe_points
[idx
].co
.y
631 fcurve
.keyframe_points
[idx
].co
.y
= avg
632 fcurve
.keyframe_points
[idx
].handle_left
.y
+= diff
633 fcurve
.keyframe_points
[idx
].handle_right
.y
+= diff
635 # Important to update the curve after modifying it!
639 # Receives armature, and rotations all bones by 90 degrees along the X axis
640 # This fixes the common axis issue BVH files have when importing.
641 # IN: Armature (bpy.types.Armature)
642 def rotate_fix_armature(arm_data
):
643 global_matrix
= Matrix
.Rotation(radians(90), 4, "X")
644 bpy
.ops
.object.mode_set(mode
='EDIT', toggle
=False)
645 #disconnect all bones for ease of global rotation
647 for bone
in arm_data
.edit_bones
:
649 connectedBones
.append(bone
.name
)
650 bone
.use_connect
= False
652 #rotate all the bones around their center
653 for bone
in arm_data
.edit_bones
:
654 bone
.transform(global_matrix
)
657 for bone
in connectedBones
:
658 arm_data
.edit_bones
[bone
].use_connect
= True
659 bpy
.ops
.object.mode_set(mode
='OBJECT', toggle
=False)
662 #Roughly scales the performer armature to match the enduser armature
663 #IN: perfromer_obj, enduser_obj, Blender objects whose .data is an armature.
664 def scale_fix_armature(performer_obj
, enduser_obj
):
665 perf_bones
= performer_obj
.data
.bones
666 end_bones
= enduser_obj
.data
.bones
668 def calculateBoundingRadius(bones
):
669 # Calculate the average position of each bone
670 center
= sum((bone
.head_local
for bone
in bones
), Vector())
673 # The radius is defined as the max distance from the center.
674 radius
= max((bone
.head_local
- center
).length
for bone
in bones
)
677 perf_rad
= calculateBoundingRadius(performer_obj
.data
.bones
)
678 end_rad
= calculateBoundingRadius(enduser_obj
.data
.bones
)
680 factor
= end_rad
/ perf_rad
681 performer_obj
.scale
= factor
* Vector((1, 1, 1))
685 #Given a performer and enduser armature, attempts to guess the hierarchy mapping
686 def guessMapping(performer_obj
, enduser_obj
):
687 perf_bones
= performer_obj
.data
.bones
688 end_bones
= enduser_obj
.data
.bones
692 def findBoneSide(bone
):
694 return "Left", bone
.replace("Left", "").lower().replace(".", "")
696 return "Right", bone
.replace("Right", "").lower().replace(".", "")
698 return "Left", bone
.replace("Left", "").lower().replace(".", "")
700 return "Right", bone
.replace("Right", "").lower().replace(".", "")
703 def nameMatch(bone_a
, bone_b
):
704 # nameMatch - receives two strings, returns 2 if they are relatively the same, 1 if they are the same but R and L and 0 if no match at all
705 side_a
, noside_a
= findBoneSide(bone_a
)
706 side_b
, noside_b
= findBoneSide(bone_b
)
708 if noside_a
in noside_b
or noside_b
in noside_a
:
711 if noside_a
in noside_b
or noside_b
in noside_a
:
715 def guessSingleMapping(perf_bone
):
716 possible_bones
= [end_bones
[0]]
718 while possible_bones
:
719 for end_bone
in possible_bones
:
720 match
= nameMatch(perf_bone
.name
, end_bone
.name
)
721 if match
== 2 and not perf_bone
.map:
722 perf_bone
.map = end_bone
.name
723 #~ elif match == 1 and not perf_bone.map:
724 #~ oppo = perf_bones[oppositeBone(perf_bone)].map
727 newPossibleBones
= []
728 for end_bone
in possible_bones
:
729 newPossibleBones
+= list(end_bone
.children
)
730 possible_bones
= newPossibleBones
732 for child
in perf_bone
.children
:
733 guessSingleMapping(child
)
735 guessSingleMapping(root
)
738 # Creates limit rotation constraints on the enduser armature based on range of motion (max min of fcurves) of the performer.
739 # IN: context (bpy.context, etc.), and 2 blender objects which are armatures
740 # OUT: creates the limit constraints.
741 def limit_dof(context
, performer_obj
, enduser_obj
):
743 perf_bones
= [bone
for bone
in performer_obj
.pose
.bones
if bone
.bone
.map]
744 c_frame
= context
.scene
.frame_current
745 for bone
in perf_bones
:
746 limitDict
[bone
.bone
.map] = [1000, 1000, 1000, -1000, -1000, -1000]
747 for t
in range(context
.scene
.frame_start
, context
.scene
.frame_end
):
748 context
.scene
.frame_set(t
)
749 for bone
in perf_bones
:
750 end_bone
= enduser_obj
.pose
.bones
[bone
.bone
.map]
751 bake_matrix
= bone
.matrix
752 rest_matrix
= end_bone
.bone
.matrix_local
754 if end_bone
.parent
and end_bone
.bone
.use_inherit_rotation
:
755 srcParent
= bone
.parent
756 parent_mat
= srcParent
.matrix
757 parent_rest
= end_bone
.parent
.bone
.matrix_local
758 parent_rest_inv
= parent_rest
.inverted()
759 parent_mat_inv
= parent_mat
.inverted()
760 bake_matrix
= parent_mat_inv
@ bake_matrix
761 rest_matrix
= parent_rest_inv
@ rest_matrix
763 rest_matrix_inv
= rest_matrix
.inverted()
764 bake_matrix
= rest_matrix_inv
@ bake_matrix
767 euler
= mat
.to_euler()
768 limitDict
[bone
.bone
.map][0] = min(limitDict
[bone
.bone
.map][0], euler
.x
)
769 limitDict
[bone
.bone
.map][1] = min(limitDict
[bone
.bone
.map][1], euler
.y
)
770 limitDict
[bone
.bone
.map][2] = min(limitDict
[bone
.bone
.map][2], euler
.z
)
771 limitDict
[bone
.bone
.map][3] = max(limitDict
[bone
.bone
.map][3], euler
.x
)
772 limitDict
[bone
.bone
.map][4] = max(limitDict
[bone
.bone
.map][4], euler
.y
)
773 limitDict
[bone
.bone
.map][5] = max(limitDict
[bone
.bone
.map][5], euler
.z
)
774 for bone
in enduser_obj
.pose
.bones
:
775 existingConstraint
= [constraint
for constraint
in bone
.constraints
if constraint
.name
== "DOF Limitation"]
776 if existingConstraint
:
777 bone
.constraints
.remove(existingConstraint
[0])
778 end_bones
= [bone
for bone
in enduser_obj
.pose
.bones
if bone
.name
in limitDict
.keys()]
779 for bone
in end_bones
:
780 #~ if not bone.is_in_ik_chain:
781 newCons
= bone
.constraints
.new("LIMIT_ROTATION")
782 newCons
.name
= "DOF Limitation"
783 newCons
.owner_space
= "LOCAL"
784 newCons
.min_x
, newCons
.min_y
, newCons
.min_z
, newCons
.max_x
, newCons
.max_y
, newCons
.max_z
= limitDict
[bone
.name
]
785 newCons
.use_limit_x
= True
786 newCons
.use_limit_y
= True
787 newCons
.use_limit_z
= True
788 context
.scene
.frame_set(c_frame
)
791 # Removes the constraints that were added by limit_dof on the enduser_obj
792 def limit_dof_toggle_off(context
, enduser_obj
):
793 for bone
in enduser_obj
.pose
.bones
:
794 existingConstraint
= [constraint
for constraint
in bone
.constraints
if constraint
.name
== "DOF Limitation"]
795 if existingConstraint
:
796 bone
.constraints
.remove(existingConstraint
[0])
799 # Reparameterizes a blender path via keyframing its eval_time to match a stride_object's forward velocity.
800 # IN: Context, stride object (blender object with location keyframes), path object.
801 def path_editing(context
, stride_obj
, path
):
802 y_fcurve
= [fcurve
for fcurve
in stride_obj
.animation_data
.action
.fcurves
if fcurve
.data_path
== "location"][1]
803 s
, e
= context
.scene
.frame_start
, context
.scene
.frame_end
# y_fcurve.range()
806 y_s
= y_fcurve
.evaluate(s
)
807 y_e
= y_fcurve
.evaluate(e
)
808 direction
= (y_e
- y_s
) / abs(y_e
- y_s
)
809 existing_cons
= [constraint
for constraint
in stride_obj
.constraints
if constraint
.type == "FOLLOW_PATH"]
810 for cons
in existing_cons
:
811 stride_obj
.constraints
.remove(cons
)
812 path_cons
= stride_obj
.constraints
.new("FOLLOW_PATH")
814 path_cons
.forward_axis
= "TRACK_NEGATIVE_Y"
816 path_cons
.forward_axis
= "FORWARD_Y"
817 path_cons
.target
= path
818 path_cons
.use_curve_follow
= True
819 path
.data
.path_duration
= e
- s
821 path
.data
.animation_data
.action
.fcurves
822 except AttributeError:
823 path
.data
.keyframe_insert("eval_time", frame
=0)
824 eval_time_fcurve
= [fcurve
for fcurve
in path
.data
.animation_data
.action
.fcurves
if fcurve
.data_path
== "eval_time"]
825 eval_time_fcurve
= eval_time_fcurve
[0]
827 parameterization
= {}
828 print("evaluating curve")
829 for t
in range(s
, e
- 1):
833 chordLength
= (y_fcurve
.evaluate(t
) - y_fcurve
.evaluate(t
+ 1))
834 totalLength
+= chordLength
835 parameterization
[t
] = totalLength
836 for t
in range(s
+ 1, e
- 1):
838 print("no forward motion")
839 parameterization
[t
] /= totalLength
840 parameterization
[t
] *= e
- s
841 parameterization
[e
] = e
- s
842 for t
in parameterization
.keys():
843 eval_time_fcurve
.keyframe_points
.insert(frame
=t
, value
=parameterization
[t
])
845 print("finished path editing")
849 #Stitches two retargeted animations together via NLA settings.
850 #IN: enduser_obj, a blender armature that has had two retargets applied.
851 def anim_stitch(context
, enduser_obj
):
852 stitch_settings
= enduser_obj
.data
.stitch_settings
853 action_1
= stitch_settings
.first_action
854 action_2
= stitch_settings
.second_action
855 if stitch_settings
.stick_bone
!= "":
856 selected_bone
= enduser_obj
.pose
.bones
[stitch_settings
.stick_bone
]
858 selected_bone
= enduser_obj
.pose
.bones
[0]
859 scene
= context
.scene
860 TrackNamesA
= enduser_obj
.data
.mocapNLATracks
[action_1
]
861 TrackNamesB
= enduser_obj
.data
.mocapNLATracks
[action_2
]
862 enduser_obj
.data
.active_mocap
= action_1
863 anim_data
= enduser_obj
.animation_data
864 # add tracks for action 2
865 mocapAction
= bpy
.data
.actions
[TrackNamesB
.base_track
]
866 mocapTrack
= anim_data
.nla_tracks
.new()
867 mocapTrack
.name
= TrackNamesB
.base_track
868 mocapStrip
= mocapTrack
.strips
.new(TrackNamesB
.base_track
, stitch_settings
.blend_frame
, mocapAction
)
869 mocapStrip
.extrapolation
= "HOLD_FORWARD"
870 mocapStrip
.blend_in
= stitch_settings
.blend_amount
871 mocapStrip
.action_frame_start
+= stitch_settings
.second_offset
872 mocapStrip
.action_frame_end
+= stitch_settings
.second_offset
873 constraintTrack
= anim_data
.nla_tracks
.new()
874 constraintTrack
.name
= TrackNamesB
.auto_fix_track
875 constraintAction
= bpy
.data
.actions
[TrackNamesB
.auto_fix_track
]
876 constraintStrip
= constraintTrack
.strips
.new(TrackNamesB
.auto_fix_track
, stitch_settings
.blend_frame
, constraintAction
)
877 constraintStrip
.extrapolation
= "HOLD_FORWARD"
878 constraintStrip
.blend_in
= stitch_settings
.blend_amount
879 userTrack
= anim_data
.nla_tracks
.new()
880 userTrack
.name
= TrackNamesB
.manual_fix_track
881 userAction
= bpy
.data
.actions
[TrackNamesB
.manual_fix_track
]
882 userStrip
= userTrack
.strips
.new(TrackNamesB
.manual_fix_track
, stitch_settings
.blend_frame
, userAction
)
883 userStrip
.extrapolation
= "HOLD_FORWARD"
884 userStrip
.blend_in
= stitch_settings
.blend_amount
886 if enduser_obj
.parent
:
887 if enduser_obj
.parent
.name
== "stride_bone":
888 stride_bone
= enduser_obj
.parent
889 stride_anim_data
= stride_bone
.animation_data
890 stride_anim_data
.use_nla
= True
891 stride_anim_data
.action
= None
892 for track
in stride_anim_data
.nla_tracks
:
893 stride_anim_data
.nla_tracks
.remove(track
)
894 actionATrack
= stride_anim_data
.nla_tracks
.new()
895 actionATrack
.name
= TrackNamesA
.stride_action
896 actionAStrip
= actionATrack
.strips
.new(TrackNamesA
.stride_action
, 0, bpy
.data
.actions
[TrackNamesA
.stride_action
])
897 actionAStrip
.extrapolation
= "NOTHING"
898 actionBTrack
= stride_anim_data
.nla_tracks
.new()
899 actionBTrack
.name
= TrackNamesB
.stride_action
900 actionBStrip
= actionBTrack
.strips
.new(TrackNamesB
.stride_action
, stitch_settings
.blend_frame
, bpy
.data
.actions
[TrackNamesB
.stride_action
])
901 actionBStrip
.action_frame_start
+= stitch_settings
.second_offset
902 actionBStrip
.action_frame_end
+= stitch_settings
.second_offset
903 actionBStrip
.extrapolation
= "NOTHING"
904 #we need to change the stride_bone's action to add the offset
905 aStrideCurves
= [fcurve
for fcurve
in bpy
.data
.actions
[TrackNamesA
.stride_action
].fcurves
if fcurve
.data_path
== "location"]
906 bStrideCurves
= [fcurve
for fcurve
in bpy
.data
.actions
[TrackNamesB
.stride_action
].fcurves
if fcurve
.data_path
== "location"]
907 scene
.frame_set(stitch_settings
.blend_frame
- 1)
908 desired_pos
= (enduser_obj
.matrix_world
* selected_bone
.matrix
.to_translation())
909 scene
.frame_set(stitch_settings
.blend_frame
)
910 actual_pos
= (enduser_obj
.matrix_world
* selected_bone
.matrix
.to_translation())
911 print(desired_pos
, actual_pos
)
912 offset
= Vector(actual_pos
) - Vector(desired_pos
)
914 for i
, fcurve
in enumerate(bStrideCurves
):
915 print(offset
[i
], i
, fcurve
.array_index
)
916 for pt
in fcurve
.keyframe_points
:
918 pt
.handle_left
.y
-= offset
[i
]
919 pt
.handle_right
.y
-= offset
[i
]
921 #actionBStrip.blend_in = stitch_settings.blend_amount
924 #Guesses setting for animation stitching via Cross Correlation
925 def guess_anim_stitch(context
, enduser_obj
):
926 stitch_settings
= enduser_obj
.data
.stitch_settings
927 action_1
= stitch_settings
.first_action
928 action_2
= stitch_settings
.second_action
929 TrackNamesA
= enduser_obj
.data
.mocapNLATracks
[action_1
]
930 TrackNamesB
= enduser_obj
.data
.mocapNLATracks
[action_2
]
931 mocapA
= bpy
.data
.actions
[TrackNamesA
.base_track
]
932 mocapB
= bpy
.data
.actions
[TrackNamesB
.base_track
]
933 curvesA
= mocapA
.fcurves
934 curvesB
= mocapB
.fcurves
935 flm
, s
, data
= crossCorrelationMatch(curvesA
, curvesB
, 10)
936 print("Guessed the following for start and offset: ", s
, flm
)
937 enduser_obj
.data
.stitch_settings
.blend_frame
= flm
938 enduser_obj
.data
.stitch_settings
.second_offset
= s